Sep 29 21:25:20 crc systemd[1]: Starting Kubernetes Kubelet...
Sep 29 21:25:21 crc restorecon[4674]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 21:25:21 crc restorecon[4674]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 21:25:21 crc 
restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 21:25:21 crc 
restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 
21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 21:25:21 crc 
restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 
21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 21:25:21 crc restorecon[4674]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 21:25:21 crc restorecon[4674]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 29 21:25:22 crc kubenswrapper[4911]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 21:25:22 crc kubenswrapper[4911]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 29 21:25:22 crc kubenswrapper[4911]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 21:25:22 crc kubenswrapper[4911]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 29 21:25:22 crc kubenswrapper[4911]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Sep 29 21:25:22 crc kubenswrapper[4911]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.440734 4911 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448004 4911 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448047 4911 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448055 4911 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448060 4911 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448065 4911 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448071 4911 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448077 4911 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448084 4911 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448090 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448096 4911 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448103 4911 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448110 4911 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448116 4911 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448123 4911 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448130 4911 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448136 4911 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448143 4911 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448150 4911 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448157 4911 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448162 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448168 4911 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448173 4911 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448178 4911 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448184 4911 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448191 4911 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448199 4911 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448204 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448209 4911 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448216 4911 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448221 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448227 4911 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448233 4911 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448238 4911 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448244 4911 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448250 4911 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448262 4911 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448268 4911 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448272 4911 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448278 4911 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448283 4911 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448291 4911 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448297 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448303 4911 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448309 4911 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448314 4911 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448319 4911 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448324 4911 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448330 4911 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448339 4911 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448344 4911 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448349 4911 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448354 4911 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448359 4911 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448364 4911 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448369 4911 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448374 4911 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448380 4911 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448388 4911 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448394 4911 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448400 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448405 4911 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448409 4911 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448414 4911 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448419 4911 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448424 4911 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448429 4911 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448435 4911 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448439 4911 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448444 4911 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448449 4911 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.448454 4911 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448562 4911 flags.go:64] FLAG: --address="0.0.0.0" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448575 4911 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448585 4911 flags.go:64] FLAG: --anonymous-auth="true" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448593 4911 flags.go:64] FLAG: --application-metrics-count-limit="100" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448602 4911 flags.go:64] FLAG: --authentication-token-webhook="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448609 4911 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448617 4911 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448625 4911 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448631 4911 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448638 4911 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448644 4911 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448651 4911 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448657 4911 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Sep 29 
21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448663 4911 flags.go:64] FLAG: --cgroup-root="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448669 4911 flags.go:64] FLAG: --cgroups-per-qos="true" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448675 4911 flags.go:64] FLAG: --client-ca-file="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448681 4911 flags.go:64] FLAG: --cloud-config="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448687 4911 flags.go:64] FLAG: --cloud-provider="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448693 4911 flags.go:64] FLAG: --cluster-dns="[]" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448700 4911 flags.go:64] FLAG: --cluster-domain="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448705 4911 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448713 4911 flags.go:64] FLAG: --config-dir="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448719 4911 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448726 4911 flags.go:64] FLAG: --container-log-max-files="5" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448735 4911 flags.go:64] FLAG: --container-log-max-size="10Mi" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448742 4911 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448748 4911 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448755 4911 flags.go:64] FLAG: --containerd-namespace="k8s.io" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448800 4911 flags.go:64] FLAG: --contention-profiling="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448807 4911 flags.go:64] FLAG: --cpu-cfs-quota="true" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448813 4911 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448819 4911 flags.go:64] FLAG: --cpu-manager-policy="none" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448826 4911 flags.go:64] FLAG: --cpu-manager-policy-options="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448833 4911 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448839 4911 flags.go:64] FLAG: --enable-controller-attach-detach="true" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448845 4911 flags.go:64] FLAG: --enable-debugging-handlers="true" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448851 4911 flags.go:64] FLAG: --enable-load-reader="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448857 4911 flags.go:64] FLAG: --enable-server="true" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448863 4911 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448871 4911 flags.go:64] FLAG: --event-burst="100" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448878 4911 flags.go:64] FLAG: --event-qps="50" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448884 4911 flags.go:64] FLAG: --event-storage-age-limit="default=0" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448890 4911 flags.go:64] FLAG: --event-storage-event-limit="default=0" Sep 29 21:25:22 crc kubenswrapper[4911]: 
I0929 21:25:22.448897 4911 flags.go:64] FLAG: --eviction-hard="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448904 4911 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448909 4911 flags.go:64] FLAG: --eviction-minimum-reclaim="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448915 4911 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448922 4911 flags.go:64] FLAG: --eviction-soft="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448928 4911 flags.go:64] FLAG: --eviction-soft-grace-period="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448934 4911 flags.go:64] FLAG: --exit-on-lock-contention="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448941 4911 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448948 4911 flags.go:64] FLAG: --experimental-mounter-path="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448954 4911 flags.go:64] FLAG: --fail-cgroupv1="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448960 4911 flags.go:64] FLAG: --fail-swap-on="true" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448966 4911 flags.go:64] FLAG: --feature-gates="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448980 4911 flags.go:64] FLAG: --file-check-frequency="20s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.448986 4911 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449002 4911 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449008 4911 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449015 4911 flags.go:64] FLAG: --healthz-port="10248" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449021 4911 flags.go:64] FLAG: --help="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449028 4911 flags.go:64] FLAG: --hostname-override="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449033 4911 flags.go:64] FLAG: --housekeeping-interval="10s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449040 4911 flags.go:64] FLAG: --http-check-frequency="20s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449046 4911 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449052 4911 flags.go:64] FLAG: --image-credential-provider-config="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449058 4911 flags.go:64] FLAG: --image-gc-high-threshold="85" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449064 4911 flags.go:64] FLAG: --image-gc-low-threshold="80" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449070 4911 flags.go:64] FLAG: --image-service-endpoint="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449076 4911 flags.go:64] FLAG: --kernel-memcg-notification="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449082 4911 flags.go:64] FLAG: --kube-api-burst="100" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449089 4911 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449096 4911 flags.go:64] FLAG: --kube-api-qps="50" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 
21:25:22.449102 4911 flags.go:64] FLAG: --kube-reserved="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449108 4911 flags.go:64] FLAG: --kube-reserved-cgroup="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449114 4911 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449120 4911 flags.go:64] FLAG: --kubelet-cgroups="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449126 4911 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449133 4911 flags.go:64] FLAG: --lock-file="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449138 4911 flags.go:64] FLAG: --log-cadvisor-usage="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449144 4911 flags.go:64] FLAG: --log-flush-frequency="5s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449150 4911 flags.go:64] FLAG: --log-json-info-buffer-size="0" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449160 4911 flags.go:64] FLAG: --log-json-split-stream="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449166 4911 flags.go:64] FLAG: --log-text-info-buffer-size="0" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449172 4911 flags.go:64] FLAG: --log-text-split-stream="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449178 4911 flags.go:64] FLAG: --logging-format="text" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449184 4911 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449190 4911 flags.go:64] FLAG: --make-iptables-util-chains="true" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449196 4911 flags.go:64] FLAG: --manifest-url="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449206 4911 flags.go:64] FLAG: --manifest-url-header="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449213 4911 flags.go:64] FLAG: --max-housekeeping-interval="15s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449219 4911 flags.go:64] FLAG: --max-open-files="1000000" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449226 4911 flags.go:64] FLAG: --max-pods="110" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449233 4911 flags.go:64] FLAG: --maximum-dead-containers="-1" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449238 4911 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449244 4911 flags.go:64] FLAG: --memory-manager-policy="None" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449251 4911 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449257 4911 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449263 4911 flags.go:64] FLAG: --node-ip="192.168.126.11" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449269 4911 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449282 4911 flags.go:64] FLAG: --node-status-max-images="50" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449288 4911 flags.go:64] FLAG: --node-status-update-frequency="10s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449294 4911 flags.go:64] FLAG: 
--oom-score-adj="-999" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449300 4911 flags.go:64] FLAG: --pod-cidr="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449305 4911 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449314 4911 flags.go:64] FLAG: --pod-manifest-path="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449319 4911 flags.go:64] FLAG: --pod-max-pids="-1" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449325 4911 flags.go:64] FLAG: --pods-per-core="0" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449331 4911 flags.go:64] FLAG: --port="10250" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449337 4911 flags.go:64] FLAG: --protect-kernel-defaults="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449342 4911 flags.go:64] FLAG: --provider-id="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449349 4911 flags.go:64] FLAG: --qos-reserved="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449356 4911 flags.go:64] FLAG: --read-only-port="10255" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449363 4911 flags.go:64] FLAG: --register-node="true" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449371 4911 flags.go:64] FLAG: --register-schedulable="true" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449377 4911 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449387 4911 flags.go:64] FLAG: --registry-burst="10" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449394 4911 flags.go:64] FLAG: --registry-qps="5" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449400 4911 flags.go:64] FLAG: --reserved-cpus="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449406 4911 flags.go:64] FLAG: --reserved-memory="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449413 4911 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449419 4911 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449427 4911 flags.go:64] FLAG: --rotate-certificates="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449434 4911 flags.go:64] FLAG: --rotate-server-certificates="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449440 4911 flags.go:64] FLAG: --runonce="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449445 4911 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449451 4911 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449457 4911 flags.go:64] FLAG: --seccomp-default="false" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449463 4911 flags.go:64] FLAG: --serialize-image-pulls="true" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449469 4911 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449475 4911 flags.go:64] FLAG: --storage-driver-db="cadvisor" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449481 4911 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449487 4911 
flags.go:64] FLAG: --storage-driver-password="root"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449493 4911 flags.go:64] FLAG: --storage-driver-secure="false"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449499 4911 flags.go:64] FLAG: --storage-driver-table="stats"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449505 4911 flags.go:64] FLAG: --storage-driver-user="root"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449511 4911 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449517 4911 flags.go:64] FLAG: --sync-frequency="1m0s"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449523 4911 flags.go:64] FLAG: --system-cgroups=""
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449529 4911 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449538 4911 flags.go:64] FLAG: --system-reserved-cgroup=""
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449545 4911 flags.go:64] FLAG: --tls-cert-file=""
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449551 4911 flags.go:64] FLAG: --tls-cipher-suites="[]"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449558 4911 flags.go:64] FLAG: --tls-min-version=""
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449566 4911 flags.go:64] FLAG: --tls-private-key-file=""
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449571 4911 flags.go:64] FLAG: --topology-manager-policy="none"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449578 4911 flags.go:64] FLAG: --topology-manager-policy-options=""
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449584 4911 flags.go:64] FLAG: --topology-manager-scope="container"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449589 4911 flags.go:64] FLAG: --v="2"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449598 4911 flags.go:64] FLAG: --version="false"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449607 4911 flags.go:64] FLAG: --vmodule=""
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449615 4911 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.449621 4911 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
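The deprecation warnings at the top of this boot all point the same way: --container-runtime-endpoint, --volume-plugin-dir, --register-with-taints and --system-reserved should move into the file passed via --config (here /etc/kubernetes/kubelet.conf, per the FLAG dump above). Below is a minimal sketch of the equivalent KubeletConfiguration stanza; the field names follow the kubelet.config.k8s.io/v1beta1 schema as best I recall and should be verified against the v1.31 kubelet before use.

```go
// Sketch: the KubeletConfiguration stanza that would replace the deprecated
// flags logged above. Field names are from the kubelet.config.k8s.io/v1beta1
// schema as I recall it -- verify against upstream docs for v1.31 before
// relying on them.
package main

import "fmt"

func main() {
	// Values copied from the FLAG dump in this log; the config-file form of
	// the runtime endpoint usually wants an explicit unix:// scheme.
	const kubeletConf = `apiVersion: kubelet.config.k8s.io/v1beta1
kind: KubeletConfiguration
containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
volumePluginDir: /etc/kubernetes/kubelet-plugins/volume/exec
registerWithTaints:
- key: node-role.kubernetes.io/master
  effect: NoSchedule
systemReserved:
  cpu: 200m
  ephemeral-storage: 350Mi
  memory: 350Mi
`
	// --pod-infra-container-image has no config-file equivalent; per the
	// warning above, image GC learns the sandbox image from CRI instead.
	fmt.Print(kubeletConf)
}
```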
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.450185 4911 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.464953 4911 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.464998 4911 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
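The flood of "unrecognized feature gate" warnings above has a simple shape: the cluster hands the kubelet its full OpenShift gate list, the kubelet only registers the upstream Kubernetes gates, and every name it does not know is warned about and skipped, so only the known gates reach the final feature_gate.go:386 map. A toy illustration of that filtering follows; it is not the actual k8s.io/component-base/featuregate implementation.

```go
// Toy sketch of the feature-gate filtering behind the warnings above:
// names the kubelet never registered are logged and dropped, the rest
// land in the effective map (the feature_gate.go:386 line). Illustrative
// only -- the real logic lives in k8s.io/component-base/featuregate.
package main

import "fmt"

func main() {
	// A few upstream gates the kubelet registers (subset, taken from the log).
	known := map[string]bool{
		"CloudDualStackNodeIPs":                  true,
		"DisableKubeletCloudCredentialProviders": true,
		"KMSv1":                                  true,
		"ValidatingAdmissionPolicy":              true,
	}
	// A mix of upstream and OpenShift-only gates, as the cluster passes them.
	requested := map[string]bool{
		"CloudDualStackNodeIPs": true,
		"GatewayAPI":            true, // OpenShift-only: triggers a warning
		"KMSv1":                 true,
	}
	effective := map[string]bool{}
	for name, enabled := range requested {
		if !known[name] {
			fmt.Printf("W unrecognized feature gate: %s\n", name)
			continue
		}
		effective[name] = enabled
	}
	fmt.Printf("I feature gates: %v\n", effective)
}
```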
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.466083 4911 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.471473 4911 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.471568 4911 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
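With client rotation on and a still-valid kubeconfig, the kubelet loads the current client certificate and, in the certificate_manager lines that follow, schedules the next rotation partway through the certificate's validity window and sleeps until then. As I recall from client-go's certificate manager, the deadline is jittered into roughly the 70-90% band of the lifetime, which matches the 2025-11-08 deadline logged for a certificate expiring 2026-02-24. A sketch of that arithmetic, with an assumed issue date:

```go
// Sketch of the rotation-deadline arithmetic behind the certificate_manager
// lines that follow: pick a jittered point at roughly 70-90% of the
// certificate's validity window, then wait until it. The percentage band is
// from client-go's certificate manager as I recall it -- an assumption.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func main() {
	notBefore := time.Date(2025, 2, 24, 5, 52, 8, 0, time.UTC) // assumed issue time
	notAfter := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)  // expiry from the log
	lifetime := notAfter.Sub(notBefore)

	// Deadline lands uniformly in [70%, 90%] of the lifetime; 2025-11-08 is
	// about 70% of a one-year window, consistent with the log.
	jittered := time.Duration(float64(lifetime) * (0.7 + 0.2*rand.Float64()))
	deadline := notBefore.Add(jittered)

	now := time.Date(2025, 9, 29, 21, 25, 22, 0, time.UTC) // boot time in this log
	fmt.Printf("rotation deadline %v, waiting %v\n", deadline, deadline.Sub(now))
}
```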
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.473806 4911 server.go:997] "Starting client certificate rotation" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.473837 4911 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.475029 4911 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-08 12:32:12.942292949 +0000 UTC Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.475191 4911 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 951h6m50.467106905s for next certificate rotation Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.500175 4911 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.502650 4911 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.521221 4911 log.go:25] "Validated CRI v1 runtime API" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.559440 4911 log.go:25] "Validated CRI v1 image API" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.561615 4911 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.567989 4911 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-29-18-51-51-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.568030 4911 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.591278 4911 manager.go:217] Machine: {Timestamp:2025-09-29 21:25:22.587037736 +0000 UTC m=+0.564150427 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799886 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:6cb362cf-0841-40fb-a840-f46642f78745 BootID:7dded9b5-8ab5-45b2-be5a-4613b6e8208f Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 
DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:d9:f5:24 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:d9:f5:24 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:79:00:5b Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:d3:e8:60 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:cd:04:fe Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:7e:b0:07 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:de:83:31:54:1d:78 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:92:b3:fb:6f:e0:42 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 
BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.591568 4911 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.591897 4911 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.592276 4911 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.592505 4911 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.592547 4911 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.592848 4911 topology_manager.go:138] "Creating topology manager with none policy" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.592859 4911 container_manager_linux.go:303] "Creating device plugin manager" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.593449 4911 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.593484 4911 server.go:66] "Creating device plugin registration server" version="v1beta1" 
socket="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.594551 4911 state_mem.go:36] "Initialized new in-memory state store" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.594679 4911 server.go:1245] "Using root directory" path="/var/lib/kubelet" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.598609 4911 kubelet.go:418] "Attempting to sync node with API server" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.598635 4911 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.598662 4911 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.598677 4911 kubelet.go:324] "Adding apiserver pod source" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.598691 4911 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.604315 4911 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.606441 4911 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.608166 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:22 crc kubenswrapper[4911]: E0929 21:25:22.608254 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.608182 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:22 crc kubenswrapper[4911]: E0929 21:25:22.608320 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.609251 4911 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.611161 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.611209 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.611224 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.611244 4911 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/host-path" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.611269 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.611285 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.611300 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.611326 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.611343 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.611358 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.611381 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.611397 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.613889 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.614735 4911 server.go:1280] "Started kubelet" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.615700 4911 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.616191 4911 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.616416 4911 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:22 crc systemd[1]: Started Kubernetes Kubelet. 
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.624685 4911 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Sep 29 21:25:22 crc kubenswrapper[4911]: E0929 21:25:22.628838 4911 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.251:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1869dde915055559 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-29 21:25:22.614678873 +0000 UTC m=+0.591791604,LastTimestamp:2025-09-29 21:25:22.614678873 +0000 UTC m=+0.591791604,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.631763 4911 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.632091 4911 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 02:45:49.640201007 +0000 UTC Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.632158 4911 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.632240 4911 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1085h20m27.007977457s for next certificate rotation Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.632612 4911 volume_manager.go:287] "The desired_state_of_world populator starts" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.632658 4911 volume_manager.go:289] "Starting Kubelet Volume Manager" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.632853 4911 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Sep 29 21:25:22 crc kubenswrapper[4911]: E0929 21:25:22.633067 4911 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 29 21:25:22 crc kubenswrapper[4911]: E0929 21:25:22.633832 4911 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="200ms" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.634933 4911 factory.go:55] Registering systemd factory Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.635099 4911 factory.go:221] Registration of the systemd container factory successfully Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.639054 4911 factory.go:153] Registering CRI-O factory Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.639099 4911 factory.go:221] Registration of the crio container factory successfully Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.639248 4911 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.639299 
4911 factory.go:103] Registering Raw factory Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.639334 4911 manager.go:1196] Started watching for new ooms in manager Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.639632 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:22 crc kubenswrapper[4911]: E0929 21:25:22.640103 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.640716 4911 manager.go:319] Starting recovery of all containers Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.641333 4911 server.go:460] "Adding debug handlers to kubelet server" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.653602 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.653704 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.653735 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.653760 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.653788 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.653855 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.653882 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.653907 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.653937 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.653975 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654002 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654031 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654059 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654092 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654119 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654145 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654175 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654199 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654225 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654250 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654274 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654301 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654326 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654352 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654379 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654404 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654433 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654461 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654485 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654511 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654536 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654613 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654669 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654695 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654738 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654774 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654835 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.654968 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655017 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655032 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655044 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655054 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655065 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655077 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655089 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655102 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655112 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655125 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655136 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655175 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655186 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655200 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655245 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655263 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655278 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655290 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655302 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655314 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655328 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655339 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655351 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655361 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655373 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655389 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.655403 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658515 4911 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658549 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658565 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658580 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658592 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658605 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658617 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658630 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658644 4911 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658704 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658717 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658729 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658741 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658754 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658768 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658783 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658812 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658824 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658835 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658848 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658859 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658872 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658884 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658896 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658908 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658918 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658928 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658941 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658952 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658964 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658976 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.658989 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659001 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659011 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659024 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659050 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659075 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659091 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659102 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659113 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659133 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659145 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659157 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659170 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659183 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659195 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659210 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659232 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659247 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659261 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659274 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659287 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659302 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" 
volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659320 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659333 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659346 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659362 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659378 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659448 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659469 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659484 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659500 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659543 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659560 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659575 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659591 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659605 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659621 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659671 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659691 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659706 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659721 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659763 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659809 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659829 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659846 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659862 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659878 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659894 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659912 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659928 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659943 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659958 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.659974 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660016 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660036 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660052 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660069 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660083 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660123 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660136 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660151 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660163 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660175 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660192 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660206 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660223 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660239 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660256 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660272 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660285 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660302 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660318 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660332 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660344 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660360 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660374 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660388 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660402 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660418 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660434 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660449 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660467 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660481 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660495 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660509 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660523 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660538 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660549 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660564 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660575 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660585 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660597 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660609 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660619 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660631 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660641 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660652 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660663 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660674 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660684 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660695 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660705 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660719 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660730 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660741 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660750 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660763 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660773 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660802 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660814 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660827 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660838 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660848 4911 reconstruct.go:97] "Volume reconstruction finished" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.660856 4911 reconciler.go:26] "Reconciler: start to sync state" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.673140 4911 manager.go:324] Recovery completed Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.683880 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.686390 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.686564 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.686680 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.687420 4911 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.687439 4911 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.687458 4911 state_mem.go:36] "Initialized new in-memory state store" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.697559 4911 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.699644 4911 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.699708 4911 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.699753 4911 kubelet.go:2335] "Starting kubelet main sync loop" Sep 29 21:25:22 crc kubenswrapper[4911]: E0929 21:25:22.699836 4911 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 29 21:25:22 crc kubenswrapper[4911]: W0929 21:25:22.701834 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:22 crc kubenswrapper[4911]: E0929 21:25:22.701912 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.708840 4911 policy_none.go:49] "None policy: Start" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.712556 4911 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.712602 4911 state_mem.go:35] "Initializing new in-memory state store" Sep 29 21:25:22 crc kubenswrapper[4911]: E0929 21:25:22.733276 4911 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.767721 4911 manager.go:334] "Starting Device Plugin manager" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.767959 4911 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.768025 4911 server.go:79] "Starting device plugin registration server" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.768663 4911 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.768868 4911 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.769152 4911 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.769382 4911 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.769428 4911 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 29 21:25:22 crc kubenswrapper[4911]: E0929 21:25:22.781410 4911 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.800616 4911 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 29 21:25:22 crc kubenswrapper[4911]: 
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.800765 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.802574 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.802654 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.802674 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.803057 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.803361 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.803463 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.804565 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.804645 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.804671 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.804963 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.805097 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
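[editor's note] The repetitive block here is the kubelet admitting the five static pods from the preceding SyncLoop ADD; each admission re-evaluates node conditions, producing one "Setting node annotation" line plus a NodeHasSufficientMemory / NodeHasNoDiskPressure / NodeHasSufficientPID triplet. When skimming bursts like this it helps to strip the klog header (severity+date, time, pid, file:line) and collapse exact consecutive repeats. The header layout below is taken from these lines; the dedup helper itself is an assumed triage aid, not kubelet code.

    // dedup.go - sketch: parses the klog header visible in this log, e.g.
    //   I0929 21:25:22.802574 4911 kubelet_node_status.go:724] "Recording ..."
    // and collapses exact consecutive duplicate messages into "msg xN".
    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
    )

    var klog = regexp.MustCompile(`([IWE])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+)\s+(\S+:\d+)\] (.*)`)

    func main() {
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 1024*1024), 1024*1024)
        last, n := "", 0
        flush := func() {
            if n > 0 {
                fmt.Printf("%s x%d\n", last, n)
            }
        }
        for sc.Scan() {
            m := klog.FindStringSubmatch(sc.Text())
            if m == nil {
                continue // not a klog-formatted line
            }
            if msg := m[6]; msg == last {
                n++
            } else {
                flush()
                last, n = msg, 1
            }
        }
        flush()
    }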
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.805155 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.804967 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.805253 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.805291 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.806433 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.806492 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.806514 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.807683 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.807727 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.807745 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.807977 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.808276 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.808447 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.809547 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.809578 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.809614 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.809911 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.809922 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.809967 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.809993 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.810084 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.810134 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.811198 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.811227 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.811239 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.811321 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.811342 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.811352 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.811882 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.811940 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.812999 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.813048 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.813067 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:22 crc kubenswrapper[4911]: E0929 21:25:22.836405 4911 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="400ms"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.864181 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.864235 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.864263 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.864286 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.864312 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.864331 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.864348 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
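[editor's note] The lease error above schedules a retry with interval="400ms"; the next occurrence of the same error further down reports interval="800ms", i.e. the controller doubles its retry interval while the API server stays unreachable. A generic sketch of that doubling pattern follows; the cap and the fake operation are assumptions for illustration, and the kubelet's exact parameters are not shown in this log.

    // backoff.go - sketch of a doubling retry interval, matching the
    // 400ms -> 800ms progression visible in the lease-controller errors.
    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    func retryWithBackoff(op func() error) {
        interval := 400 * time.Millisecond // first retry interval seen in the log
        const maxInterval = 7 * time.Second // assumed cap
        for {
            if err := op(); err == nil {
                return
            }
            fmt.Printf("will retry, interval=%q\n", interval.String())
            time.Sleep(interval)
            interval *= 2 // 400ms, 800ms, 1.6s, ...
            if interval > maxInterval {
                interval = maxInterval
            }
        }
    }

    func main() {
        attempts := 0
        retryWithBackoff(func() error {
            attempts++
            if attempts < 4 {
                return errors.New("dial tcp: connect: connection refused")
            }
            return nil
        })
        fmt.Println("succeeded after", attempts, "attempts")
    }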
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.864572 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.864620 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.864656 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.864727 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.864962 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.865058 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.865113 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.865162 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.869442 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.871118 4911 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.871180 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.871191 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.871219 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 21:25:22 crc kubenswrapper[4911]: E0929 21:25:22.871879 4911 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.251:6443: connect: connection refused" node="crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966582 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966658 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966743 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966765 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966817 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966840 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966862 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966900 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966872 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966976 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966982 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966996 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966931 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967035 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966925 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967094 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.966986 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967189 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod 
\"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967251 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967334 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967335 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967432 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967471 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967511 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967513 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967550 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967572 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc 
kubenswrapper[4911]: I0929 21:25:22.967598 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967634 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 21:25:22 crc kubenswrapper[4911]: I0929 21:25:22.967710 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.072282 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.074191 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.074259 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.074281 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.074326 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 21:25:23 crc kubenswrapper[4911]: E0929 21:25:23.074992 4911 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.251:6443: connect: connection refused" node="crc" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.126587 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.132463 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.147432 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.164362 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.168342 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 21:25:23 crc kubenswrapper[4911]: W0929 21:25:23.183826 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-2d6878663c43d534a6f182b421260b4f1b3886f5cbeac2f6e2d35317684ee576 WatchSource:0}: Error finding container 2d6878663c43d534a6f182b421260b4f1b3886f5cbeac2f6e2d35317684ee576: Status 404 returned error can't find the container with id 2d6878663c43d534a6f182b421260b4f1b3886f5cbeac2f6e2d35317684ee576 Sep 29 21:25:23 crc kubenswrapper[4911]: W0929 21:25:23.185360 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-ac9eeca484599ce0c11573b31562b6a3b801c55878147d29233104dd02c5566f WatchSource:0}: Error finding container ac9eeca484599ce0c11573b31562b6a3b801c55878147d29233104dd02c5566f: Status 404 returned error can't find the container with id ac9eeca484599ce0c11573b31562b6a3b801c55878147d29233104dd02c5566f Sep 29 21:25:23 crc kubenswrapper[4911]: W0929 21:25:23.188966 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-97fc0e588da3ecfe3ae3ad1461930009787b991bd6060f791bd0c43ab14a8991 WatchSource:0}: Error finding container 97fc0e588da3ecfe3ae3ad1461930009787b991bd6060f791bd0c43ab14a8991: Status 404 returned error can't find the container with id 97fc0e588da3ecfe3ae3ad1461930009787b991bd6060f791bd0c43ab14a8991 Sep 29 21:25:23 crc kubenswrapper[4911]: W0929 21:25:23.194302 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-656faa67aee9f79715638de107772d22a0666b6616c3125a4a125bc6d90c1b12 WatchSource:0}: Error finding container 656faa67aee9f79715638de107772d22a0666b6616c3125a4a125bc6d90c1b12: Status 404 returned error can't find the container with id 656faa67aee9f79715638de107772d22a0666b6616c3125a4a125bc6d90c1b12 Sep 29 21:25:23 crc kubenswrapper[4911]: E0929 21:25:23.238475 4911 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="800ms" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.475339 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.477206 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.477282 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.477302 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.477347 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 21:25:23 crc kubenswrapper[4911]: E0929 21:25:23.477901 4911 kubelet_node_status.go:99] "Unable to register node with API server" err="Post 
\"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.251:6443: connect: connection refused" node="crc" Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.617653 4911 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.705275 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"97fc0e588da3ecfe3ae3ad1461930009787b991bd6060f791bd0c43ab14a8991"} Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.707202 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"ac9eeca484599ce0c11573b31562b6a3b801c55878147d29233104dd02c5566f"} Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.708837 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"992c15b2547b0772a25059ec485d41ea835ef15a83fa3301878f0d5e98731c7d"} Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.710027 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"656faa67aee9f79715638de107772d22a0666b6616c3125a4a125bc6d90c1b12"} Sep 29 21:25:23 crc kubenswrapper[4911]: I0929 21:25:23.715063 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2d6878663c43d534a6f182b421260b4f1b3886f5cbeac2f6e2d35317684ee576"} Sep 29 21:25:23 crc kubenswrapper[4911]: W0929 21:25:23.746891 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:23 crc kubenswrapper[4911]: E0929 21:25:23.747024 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Sep 29 21:25:24 crc kubenswrapper[4911]: E0929 21:25:24.040043 4911 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="1.6s" Sep 29 21:25:24 crc kubenswrapper[4911]: W0929 21:25:24.084007 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:24 crc kubenswrapper[4911]: E0929 21:25:24.084171 4911 reflector.go:158] "Unhandled Error" 
err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Sep 29 21:25:24 crc kubenswrapper[4911]: W0929 21:25:24.106953 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:24 crc kubenswrapper[4911]: E0929 21:25:24.107077 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Sep 29 21:25:24 crc kubenswrapper[4911]: W0929 21:25:24.226628 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:24 crc kubenswrapper[4911]: E0929 21:25:24.226823 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.278665 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.281321 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.281421 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.281448 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.281503 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 21:25:24 crc kubenswrapper[4911]: E0929 21:25:24.282362 4911 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.251:6443: connect: connection refused" node="crc" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.618625 4911 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.719500 4911 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd" exitCode=0 Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.719618 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd"} Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.719687 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.720857 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.720889 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.720898 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.721284 4911 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c" exitCode=0 Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.721363 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c"} Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.721436 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.722616 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.722646 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.722660 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.723309 4911 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6" exitCode=0 Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.723351 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6"} Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.723462 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.724181 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.724207 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.724217 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.724711 4911 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" 
containerID="8f95f11783829a277767850aa2a8af98d27b5c5f205c36392671b69f368bffa1" exitCode=0 Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.724778 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"8f95f11783829a277767850aa2a8af98d27b5c5f205c36392671b69f368bffa1"} Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.724827 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.725505 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.725588 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.725614 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.725630 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.729736 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.729784 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.729827 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.731635 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04"} Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.731674 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f"} Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.731690 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834"} Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.731703 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a"} Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.731744 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.733278 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.733305 4911 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:24 crc kubenswrapper[4911]: I0929 21:25:24.733316 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.210019 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.254138 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.617657 4911 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:25 crc kubenswrapper[4911]: E0929 21:25:25.641672 4911 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="3.2s" Sep 29 21:25:25 crc kubenswrapper[4911]: W0929 21:25:25.736955 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Sep 29 21:25:25 crc kubenswrapper[4911]: E0929 21:25:25.737091 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.758751 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.759270 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4aab1c186df02eac1c6a9a54cf66510d44e4c63bad2da3cdbe53923869cc01cf"} Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.759340 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ed4c0d97fe39092f5b2cf1e4575d8ea9238b60085270aec20f28727379f9a1a2"} Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.759358 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"af9e660ab2714b6ff9ddefb3634d2ae48dabf0a144b5f9ba96d429654fde989f"} Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.762511 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.762589 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:25 crc 
kubenswrapper[4911]: I0929 21:25:25.762621 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.764876 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26"} Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.764942 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464"} Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.764959 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09"} Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.764969 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2"} Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.768154 4911 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178" exitCode=0 Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.768291 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178"} Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.768333 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.774193 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.774255 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.774270 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.777201 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"521e40cd673dac9022f408c58f537d4504f4077392fc1ea2ebf51f126f7ae6c4"} Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.778477 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.778699 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.780875 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.781020 4911 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.781110 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.781137 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.781055 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.781674 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.883775 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.888039 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.888140 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.888152 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:25 crc kubenswrapper[4911]: I0929 21:25:25.888220 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 21:25:25 crc kubenswrapper[4911]: E0929 21:25:25.888780 4911 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.251:6443: connect: connection refused" node="crc" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.068847 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.786723 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e"} Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.786879 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.788639 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.788718 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.788740 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.790202 4911 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba" exitCode=0 Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.790336 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.790376 4911 kubelet_node_status.go:401] 
"Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.790388 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.790415 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba"} Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.790499 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.790610 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.791917 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.791964 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.791987 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.792047 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.792105 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.792125 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.792403 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.792466 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.792411 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.792486 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.792513 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:26 crc kubenswrapper[4911]: I0929 21:25:26.792534 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.338200 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.796329 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d"} Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.796388 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588"} Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.796402 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.796401 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.796405 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb"} Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.796604 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557"} Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.796361 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.796645 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.797522 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.797557 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.797569 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.797639 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.797669 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.797683 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.797741 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.797834 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:27 crc kubenswrapper[4911]: I0929 21:25:27.797891 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:28 crc kubenswrapper[4911]: I0929 21:25:28.802247 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28"} Sep 29 21:25:28 crc kubenswrapper[4911]: I0929 21:25:28.802395 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:28 crc kubenswrapper[4911]: I0929 21:25:28.803151 4911 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:28 crc kubenswrapper[4911]: I0929 21:25:28.803183 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:28 crc kubenswrapper[4911]: I0929 21:25:28.803193 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.089634 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.090801 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.090836 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.090849 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.090876 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.471123 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.471374 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.472993 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.473043 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.473056 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.759246 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.804463 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.804507 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.805400 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.805432 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.805441 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.805508 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.805542 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:29 crc kubenswrapper[4911]: I0929 21:25:29.805551 4911 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:30 crc kubenswrapper[4911]: I0929 21:25:30.440981 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:30 crc kubenswrapper[4911]: I0929 21:25:30.441299 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:30 crc kubenswrapper[4911]: I0929 21:25:30.442833 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:30 crc kubenswrapper[4911]: I0929 21:25:30.442872 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:30 crc kubenswrapper[4911]: I0929 21:25:30.442885 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.134110 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.134431 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.136424 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.136471 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.136488 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.175033 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.175384 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.177050 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.177111 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.177124 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.774722 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.775034 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.776960 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.777106 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:31 crc kubenswrapper[4911]: I0929 21:25:31.777139 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 
21:25:32 crc kubenswrapper[4911]: E0929 21:25:32.781574 4911 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 29 21:25:33 crc kubenswrapper[4911]: I0929 21:25:33.441911 4911 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 21:25:33 crc kubenswrapper[4911]: I0929 21:25:33.442063 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 21:25:35 crc kubenswrapper[4911]: I0929 21:25:35.718575 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Sep 29 21:25:35 crc kubenswrapper[4911]: I0929 21:25:35.718889 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:35 crc kubenswrapper[4911]: I0929 21:25:35.720736 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:35 crc kubenswrapper[4911]: I0929 21:25:35.721018 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:35 crc kubenswrapper[4911]: I0929 21:25:35.721160 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:36 crc kubenswrapper[4911]: W0929 21:25:36.364892 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.365008 4911 trace.go:236] Trace[1825043596]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 21:25:26.364) (total time: 10000ms): Sep 29 21:25:36 crc kubenswrapper[4911]: Trace[1825043596]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (21:25:36.364) Sep 29 21:25:36 crc kubenswrapper[4911]: Trace[1825043596]: [10.000810581s] [10.000810581s] END Sep 29 21:25:36 crc kubenswrapper[4911]: E0929 21:25:36.365047 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 29 21:25:36 crc kubenswrapper[4911]: W0929 21:25:36.583259 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.583414 4911 trace.go:236] Trace[1436335737]: "Reflector ListAndWatch" 
name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 21:25:26.581) (total time: 10001ms): Sep 29 21:25:36 crc kubenswrapper[4911]: Trace[1436335737]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (21:25:36.583) Sep 29 21:25:36 crc kubenswrapper[4911]: Trace[1436335737]: [10.001915965s] [10.001915965s] END Sep 29 21:25:36 crc kubenswrapper[4911]: E0929 21:25:36.583453 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.619168 4911 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Sep 29 21:25:36 crc kubenswrapper[4911]: W0929 21:25:36.704257 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.704401 4911 trace.go:236] Trace[558733015]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 21:25:26.703) (total time: 10001ms): Sep 29 21:25:36 crc kubenswrapper[4911]: Trace[558733015]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (21:25:36.704) Sep 29 21:25:36 crc kubenswrapper[4911]: Trace[558733015]: [10.001295396s] [10.001295396s] END Sep 29 21:25:36 crc kubenswrapper[4911]: E0929 21:25:36.704452 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.830731 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.833253 4911 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.833356 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.833706 4911 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" 
containerID="9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e" exitCode=255 Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.833764 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e"} Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.833972 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.834853 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.834928 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.834944 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.835683 4911 scope.go:117] "RemoveContainer" containerID="9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e" Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.844455 4911 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.844512 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.868806 4911 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 29 21:25:36 crc kubenswrapper[4911]: I0929 21:25:36.868884 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 29 21:25:37 crc kubenswrapper[4911]: I0929 21:25:37.342616 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:37 crc kubenswrapper[4911]: I0929 21:25:37.342768 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 21:25:37 crc kubenswrapper[4911]: I0929 21:25:37.344000 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:37 crc kubenswrapper[4911]: I0929 21:25:37.344052 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:37 crc 
kubenswrapper[4911]: I0929 21:25:37.344062 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:37 crc kubenswrapper[4911]: I0929 21:25:37.840143 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Sep 29 21:25:37 crc kubenswrapper[4911]: I0929 21:25:37.841966 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0"}
Sep 29 21:25:37 crc kubenswrapper[4911]: I0929 21:25:37.842186 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 21:25:37 crc kubenswrapper[4911]: I0929 21:25:37.843030 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:37 crc kubenswrapper[4911]: I0929 21:25:37.843065 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:37 crc kubenswrapper[4911]: I0929 21:25:37.843076 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:39 crc kubenswrapper[4911]: I0929 21:25:39.471871 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 21:25:39 crc kubenswrapper[4911]: I0929 21:25:39.472077 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 21:25:39 crc kubenswrapper[4911]: I0929 21:25:39.482740 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:39 crc kubenswrapper[4911]: I0929 21:25:39.482827 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:39 crc kubenswrapper[4911]: I0929 21:25:39.482841 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.130513 4911 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.230412 4911 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.364260 4911 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.610482 4911 apiserver.go:52] "Watching apiserver"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.615181 4911 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.615492 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.616150 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.616265 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:25:40 crc kubenswrapper[4911]: E0929 21:25:40.616345 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.616396 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.616403 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.616418 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:25:40 crc kubenswrapper[4911]: E0929 21:25:40.616454 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.616493 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 29 21:25:40 crc kubenswrapper[4911]: E0929 21:25:40.616703 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.618045 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.619180 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.619341 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.619446 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.619522 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.619748 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.620694 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.620832 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.622825 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.634231 4911 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.648553 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.672234 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.684383 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.697501 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.735707 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.762527 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.777604 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:40 crc kubenswrapper[4911]: I0929 21:25:40.794242 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.179328 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.183061 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.194027 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.195973 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.207243 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.222454 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.232351 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.242107 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.252090 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.266000 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.277547 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.287878 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.300063 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.312058 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.329185 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.343213 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.700969 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:41 crc kubenswrapper[4911]: E0929 21:25:41.701163 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:25:41 crc kubenswrapper[4911]: E0929 21:25:41.848062 4911 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.852656 4911 trace.go:236] Trace[3457365]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 21:25:30.248) (total time: 11603ms): Sep 29 21:25:41 crc kubenswrapper[4911]: Trace[3457365]: ---"Objects listed" error: 11603ms (21:25:41.852) Sep 29 21:25:41 crc kubenswrapper[4911]: Trace[3457365]: [11.603624774s] [11.603624774s] END Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.852690 4911 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Sep 29 21:25:41 crc kubenswrapper[4911]: E0929 21:25:41.853146 4911 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.855246 4911 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.926350 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.930007 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.942785 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.943271 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.956116 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.956231 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.956291 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.956500 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.956500 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.957062 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.957194 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.957602 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.957855 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.967554 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973261 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973359 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973394 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973429 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973457 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973498 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973534 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973562 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973596 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973625 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973654 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973702 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973738 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973761 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973834 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973858 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973906 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 
21:25:41.973927 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973952 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973981 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.974006 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.974031 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.974079 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.974103 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.974153 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976064 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976151 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976188 4911 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976228 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976262 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976291 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976321 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976360 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976403 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976437 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976463 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976494 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976618 4911 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976658 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976692 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976731 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976771 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976827 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976860 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976938 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.976975 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977007 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 21:25:41 crc 
kubenswrapper[4911]: I0929 21:25:41.977034 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977074 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977133 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977164 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977196 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977236 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977268 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977296 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977333 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977478 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977509 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977543 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977574 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977607 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977635 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977903 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.977982 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978030 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978065 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978102 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") 
" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978183 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978217 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978269 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978297 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978330 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978499 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978568 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978595 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978622 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978651 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 21:25:41 crc 
kubenswrapper[4911]: I0929 21:25:41.978693 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978745 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978777 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978863 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978899 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978930 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978979 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979027 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979058 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979111 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: 
\"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979139 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979382 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979416 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979472 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979534 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979566 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979598 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979640 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979672 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979701 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979760 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979863 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979912 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979943 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979967 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980031 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980060 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980223 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980271 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980319 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980346 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980373 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980435 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980481 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980503 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980550 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980581 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980625 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980654 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980700 4911 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980729 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980754 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.980784 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973937 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973942 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.973956 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.974163 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.974184 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.974231 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.978019 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979120 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.979367 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.982203 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.992412 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.992502 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.992603 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.993143 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.993525 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.993542 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.993884 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.994290 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.994548 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.987921 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999025 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999146 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999195 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999229 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999257 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999290 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999322 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999357 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999388 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999420 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999452 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999486 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999516 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 21:25:41 crc kubenswrapper[4911]: I0929 21:25:41.999542 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.000450 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.000649 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.001402 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.001764 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.002133 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.002190 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.002481 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.003347 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.003831 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.004206 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.004899 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.005060 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.008056 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.008237 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.008350 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.008426 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.008688 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.009464 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.009505 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.010262 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.010634 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.011003 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.011351 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.011698 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.012064 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.012860 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.013241 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.013464 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.013687 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.013879 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.014100 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.014250 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.014628 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.014986 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.015161 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.015488 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.015833 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.016289 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.016724 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.017941 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.018425 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.018667 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.019242 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.019315 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.019413 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.019431 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.019595 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.019617 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.019574 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.019854 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.020000 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.020129 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.020016 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.020251 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.020467 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.020572 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.020673 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.019483 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.022877 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.022957 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.022984 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023008 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023030 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023070 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 
21:25:42.023089 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023114 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023133 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023156 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023176 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023199 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023259 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023282 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023302 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023320 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023341 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023364 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023382 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023400 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023428 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023456 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023473 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023491 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023506 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023522 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: 
\"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023542 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023560 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023576 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023598 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023614 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023630 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023647 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023664 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023684 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023702 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023721 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023742 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023760 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023780 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023813 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.024897 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.024952 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.024977 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.025002 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.025030 4911 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.025054 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.025075 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.025091 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.025116 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023277 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023333 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.002226 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023377 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023490 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.023746 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.024149 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.025190 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:25:42.525163212 +0000 UTC m=+20.502275883 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035149 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035315 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035457 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035487 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035560 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035606 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035627 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035649 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035686 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035705 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035730 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036103 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036140 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036173 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036199 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036217 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036238 4911 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036257 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036317 4911 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036332 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036345 4911 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036357 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036366 4911 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036415 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036429 4911 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036444 4911 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036454 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036464 4911 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036473 4911 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036484 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036494 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036503 4911 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036513 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036522 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036534 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036545 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036559 4911 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036568 4911 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036580 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036596 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036608 4911 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc 
kubenswrapper[4911]: I0929 21:25:42.036617 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036626 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036636 4911 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036648 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036658 4911 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036667 4911 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036677 4911 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036687 4911 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036700 4911 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036711 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036722 4911 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036733 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036748 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036762 4911 
reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036780 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036809 4911 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036824 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036834 4911 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036844 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036854 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036864 4911 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036874 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036883 4911 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036893 4911 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036903 4911 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036911 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036920 4911 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: 
\"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036930 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036939 4911 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036950 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036960 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036970 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036979 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036990 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036999 4911 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037010 4911 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037020 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037031 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037041 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037050 4911 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037060 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037069 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037079 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037090 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037099 4911 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037110 4911 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037121 4911 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037130 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037141 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037151 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037159 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037169 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc 
kubenswrapper[4911]: I0929 21:25:42.037178 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037187 4911 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037197 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037206 4911 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037217 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.042269 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-dnhjh"] Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.042748 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-dnhjh" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.052130 4911 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035238 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.055332 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035363 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.025215 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.025445 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.025590 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.025893 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.025892 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.026035 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.026311 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.026456 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.026492 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.026772 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.056552 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.026911 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.027033 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.027643 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.027739 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.027951 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.028719 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.028845 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.028625 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.029196 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.029428 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.029993 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.030066 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.030204 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.030745 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.030802 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.030812 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.030964 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.031228 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.031441 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.031499 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.031546 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.031695 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.031988 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.032479 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.032645 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.033213 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.033924 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.034172 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.034448 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.034651 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.034656 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035070 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035429 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035580 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035688 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.035968 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036346 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036349 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036543 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036575 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036649 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036862 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.036921 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). 
InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037080 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037219 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037238 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037769 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037941 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.037953 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.041503 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.025182 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.048694 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.048808 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.054125 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.054174 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.054692 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.055220 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.055236 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.055836 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.056224 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.056382 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.056423 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.056638 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.057049 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.057213 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.057250 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.057500 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.058233 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.058303 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.058925 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.059006 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.059172 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.059657 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.059812 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:42.559770765 +0000 UTC m=+20.536883436 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.059837 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.060583 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.060829 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.061025 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:42.560986963 +0000 UTC m=+20.538099814 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.063741 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.063900 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.063918 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.065140 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.066150 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.066334 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.066340 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.066401 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.069591 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.069640 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.069660 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.069725 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:42.569706158 +0000 UTC m=+20.546818829 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.071086 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.071245 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.071833 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.072025 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.072898 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.073151 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.074240 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.074967 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.075068 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.077445 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.077775 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.077825 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.077840 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.077906 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:42.577887178 +0000 UTC m=+20.554999849 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.081289 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.081852 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.081906 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.082168 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.082306 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.082625 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.083122 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.083483 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.083688 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.084517 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.085372 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.090549 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.091764 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.101826 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.106824 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.108157 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.116491 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.128107 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.136868 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138186 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64hpk\" (UniqueName: \"kubernetes.io/projected/4fe62a53-f0b8-4c66-8adf-3b9f8bef4195-kube-api-access-64hpk\") pod \"node-resolver-dnhjh\" (UID: \"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\") " pod="openshift-dns/node-resolver-dnhjh" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138299 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4fe62a53-f0b8-4c66-8adf-3b9f8bef4195-hosts-file\") pod \"node-resolver-dnhjh\" (UID: \"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\") " pod="openshift-dns/node-resolver-dnhjh" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138343 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138368 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138446 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138462 4911 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138509 4911 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138524 4911 reconciler_common.go:293] "Volume detached for volume 
\"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138519 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138536 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138604 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138621 4911 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138635 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138648 4911 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138662 4911 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138676 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138693 4911 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138673 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138707 4911 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138782 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: 
\"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138812 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138825 4911 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138838 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138848 4911 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138858 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138867 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138876 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138886 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138895 4911 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138904 4911 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138915 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138924 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138933 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: 
\"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138942 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138952 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138960 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138970 4911 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138980 4911 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.138991 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139002 4911 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139031 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139043 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139133 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139159 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139198 4911 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139212 4911 reconciler_common.go:293] "Volume detached for volume 
\"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139226 4911 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139238 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139271 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139287 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139299 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139313 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139327 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139360 4911 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139373 4911 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139388 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139401 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139435 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139452 4911 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139465 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139481 4911 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139517 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139533 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139550 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139566 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139604 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139619 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139633 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139647 4911 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139680 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139694 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139707 4911 reconciler_common.go:293] "Volume 
detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139721 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139765 4911 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139780 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139835 4911 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139852 4911 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139867 4911 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139880 4911 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139918 4911 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139930 4911 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139943 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139957 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.139991 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140006 4911 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140023 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140036 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140068 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140090 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140102 4911 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140116 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140147 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140163 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140177 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140190 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140202 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140236 4911 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140248 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140263 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140276 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140309 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140323 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140336 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140348 4911 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140360 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140393 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140406 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140418 4911 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140434 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140467 4911 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140480 4911 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140491 4911 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140505 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140517 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140550 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140562 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140574 4911 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140586 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140597 4911 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140629 4911 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.140746 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.148773 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.156411 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.165097 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-ap
iserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.170452 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.178284 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources
\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.189948 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: W0929 21:25:42.193901 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-77e48211e4d69bf8eaec52a83e17a33044e7128f9bcc220a13bfc3793bc05633 WatchSource:0}: Error finding container 77e48211e4d69bf8eaec52a83e17a33044e7128f9bcc220a13bfc3793bc05633: Status 404 returned error can't find the container with id 77e48211e4d69bf8eaec52a83e17a33044e7128f9bcc220a13bfc3793bc05633 Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.202099 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.217415 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.230287 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.242025 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64hpk\" (UniqueName: \"kubernetes.io/projected/4fe62a53-f0b8-4c66-8adf-3b9f8bef4195-kube-api-access-64hpk\") pod \"node-resolver-dnhjh\" (UID: \"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\") " pod="openshift-dns/node-resolver-dnhjh" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.242093 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4fe62a53-f0b8-4c66-8adf-3b9f8bef4195-hosts-file\") pod \"node-resolver-dnhjh\" (UID: \"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\") " pod="openshift-dns/node-resolver-dnhjh" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.242171 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4fe62a53-f0b8-4c66-8adf-3b9f8bef4195-hosts-file\") pod \"node-resolver-dnhjh\" (UID: \"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\") " pod="openshift-dns/node-resolver-dnhjh" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.259684 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64hpk\" (UniqueName: \"kubernetes.io/projected/4fe62a53-f0b8-4c66-8adf-3b9f8bef4195-kube-api-access-64hpk\") pod \"node-resolver-dnhjh\" (UID: \"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\") " pod="openshift-dns/node-resolver-dnhjh" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.392341 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-dnhjh" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.545601 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.546013 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:25:43.545996382 +0000 UTC m=+21.523109053 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.646866 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.647081 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.647158 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.647284 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:43.647257818 +0000 UTC m=+21.624370509 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.647287 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.647322 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.647351 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.647370 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.647365 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.647432 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:43.647406742 +0000 UTC m=+21.624519493 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.647430 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.647481 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.647500 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.647525 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.647549 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:43.647522346 +0000 UTC m=+21.624635037 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.647584 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:43.647574508 +0000 UTC m=+21.624687199 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.700012 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.700061 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.700495 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:25:42 crc kubenswrapper[4911]: E0929 21:25:42.700552 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.705877 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.706512 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.708129 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.708914 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.710105 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.710776 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.711546 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.712757 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.713427 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.713576 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.714753 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.715405 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.716731 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.717399 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 
21:25:42.718080 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.719363 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.720047 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.721268 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.721762 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.722571 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.723846 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.724401 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.725931 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.726484 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.728000 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.728613 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.729589 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.731995 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.732650 4911 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.734001 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.734562 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.735639 4911 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.735766 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.736940 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.738089 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.739024 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.739429 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.741116 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.742002 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.742919 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 29 21:25:42 crc 
kubenswrapper[4911]: I0929 21:25:42.743539 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.744635 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.745128 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.746156 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.747022 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.748009 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.748462 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.749395 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.750006 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.751318 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.752033 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.752870 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.752997 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.753941 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.755011 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.756259 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.756894 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" 
path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.773436 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-synce
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.795588 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.807898 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.819203 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.869870 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.870000 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436"} Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.870057 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"dc8a58f53fb06ded171ca8cdd7d4c559d083eebb5ff617eed177729f0ff08c08"} Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.873534 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-dnhjh" event={"ID":"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195","Type":"ContainerStarted","Data":"b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37"} Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.873565 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-dnhjh" event={"ID":"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195","Type":"ContainerStarted","Data":"00eb6d7ad1b73ae4266ebaadb463830b5ca9bb530999e16ecfbe40de7573f2f7"} Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.879089 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909"} Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.879251 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a"} Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.879287 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"77e48211e4d69bf8eaec52a83e17a33044e7128f9bcc220a13bfc3793bc05633"} Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.880454 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" 
event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"5a6f955c2867b049ec3f89e770f5da173ca1d91f50205b59461d02787f2d168e"} Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.883676 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.899473 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.913128 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.931423 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.949566 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.962657 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.975078 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.984897 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\
\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:42 crc kubenswrapper[4911]: I0929 21:25:42.997699 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.014350 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.572261 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.572534 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:25:45.572489421 +0000 UTC m=+23.549602102 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.657269 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-lrfbg"] Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.657701 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.658870 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-bp485"] Sep 29 21:25:43 crc kubenswrapper[4911]: W0929 21:25:43.659964 4911 reflector.go:561] object-"openshift-multus"/"cni-copy-resources": failed to list *v1.ConfigMap: configmaps "cni-copy-resources" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.660021 4911 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"cni-copy-resources\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"cni-copy-resources\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 21:25:43 crc kubenswrapper[4911]: W0929 21:25:43.660051 4911 reflector.go:561] object-"openshift-multus"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Sep 29 21:25:43 crc kubenswrapper[4911]: W0929 21:25:43.660116 4911 reflector.go:561] object-"openshift-multus"/"default-dockercfg-2q5b6": failed to list *v1.Secret: secrets "default-dockercfg-2q5b6" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.660146 4911 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-dockercfg-2q5b6\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-2q5b6\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.660170 4911 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.660670 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.661204 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.661871 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.663605 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.663626 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.672859 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.672920 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.672966 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.672992 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.673019 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.673038 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.673052 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.673063 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:45.673046605 +0000 UTC m=+23.650159276 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.673085 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:45.673076516 +0000 UTC m=+23.650189177 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.673001 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.673156 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.673177 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.673185 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.673203 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:45.67319551 +0000 UTC m=+23.650308171 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.673205 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.673282 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:45.673257781 +0000 UTC m=+23.650370492 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.676326 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.692756 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.701078 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:43 crc kubenswrapper[4911]: E0929 21:25:43.701210 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.706730 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.721847 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.735494 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.752277 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.773842 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-multus-socket-dir-parent\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.773893 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-etc-kubernetes\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.773912 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/364060da-3bac-4f3e-b8b8-a64b0441cb5e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.773938 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-multus-cni-dir\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.773959 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/1179c900-e866-4c5a-bb06-6032cc03a075-multus-daemon-config\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.773976 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-cnibin\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.773991 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-run-multus-certs\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774006 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-var-lib-kubelet\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc 
kubenswrapper[4911]: I0929 21:25:43.774041 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1179c900-e866-4c5a-bb06-6032cc03a075-cni-binary-copy\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774070 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-var-lib-cni-bin\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774087 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-multus-conf-dir\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774115 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/364060da-3bac-4f3e-b8b8-a64b0441cb5e-cnibin\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774132 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/364060da-3bac-4f3e-b8b8-a64b0441cb5e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774150 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-system-cni-dir\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774223 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-run-k8s-cni-cncf-io\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774286 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/364060da-3bac-4f3e-b8b8-a64b0441cb5e-system-cni-dir\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774301 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-var-lib-cni-multus\") pod \"multus-lrfbg\" (UID: 
\"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774373 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-hostroot\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774430 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/364060da-3bac-4f3e-b8b8-a64b0441cb5e-os-release\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774456 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-os-release\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774478 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwhfq\" (UniqueName: \"kubernetes.io/projected/1179c900-e866-4c5a-bb06-6032cc03a075-kube-api-access-jwhfq\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774501 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/364060da-3bac-4f3e-b8b8-a64b0441cb5e-cni-binary-copy\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774565 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-run-netns\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.774618 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ng4nx\" (UniqueName: \"kubernetes.io/projected/364060da-3bac-4f3e-b8b8-a64b0441cb5e-kube-api-access-ng4nx\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.776687 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.787544 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.801537 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.816952 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.833398 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.849534 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.862739 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.874865 4911 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875030 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-multus-socket-dir-parent\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875067 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-etc-kubernetes\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875093 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/364060da-3bac-4f3e-b8b8-a64b0441cb5e-cni-sysctl-allowlist\") 
pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875112 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-multus-cni-dir\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875128 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/1179c900-e866-4c5a-bb06-6032cc03a075-multus-daemon-config\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875143 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-cnibin\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875159 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-var-lib-kubelet\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875174 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-run-multus-certs\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875206 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-multus-conf-dir\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875235 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-var-lib-kubelet\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875259 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-multus-cni-dir\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875224 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1179c900-e866-4c5a-bb06-6032cc03a075-cni-binary-copy\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875323 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-multus-conf-dir\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875196 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-multus-socket-dir-parent\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875328 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-cnibin\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875356 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-etc-kubernetes\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875374 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-var-lib-cni-bin\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875365 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-run-multus-certs\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875404 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-system-cni-dir\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875448 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-var-lib-cni-bin\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875515 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/364060da-3bac-4f3e-b8b8-a64b0441cb5e-cnibin\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875442 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-system-cni-dir\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " 
pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875537 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/364060da-3bac-4f3e-b8b8-a64b0441cb5e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875555 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/364060da-3bac-4f3e-b8b8-a64b0441cb5e-cnibin\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875564 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-run-k8s-cni-cncf-io\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875586 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/364060da-3bac-4f3e-b8b8-a64b0441cb5e-system-cni-dir\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875602 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-var-lib-cni-multus\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875620 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-hostroot\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875638 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-run-k8s-cni-cncf-io\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875646 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwhfq\" (UniqueName: \"kubernetes.io/projected/1179c900-e866-4c5a-bb06-6032cc03a075-kube-api-access-jwhfq\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875673 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/364060da-3bac-4f3e-b8b8-a64b0441cb5e-os-release\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc 
kubenswrapper[4911]: I0929 21:25:43.875674 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-var-lib-cni-multus\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875701 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-os-release\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875707 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/364060da-3bac-4f3e-b8b8-a64b0441cb5e-system-cni-dir\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875721 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/364060da-3bac-4f3e-b8b8-a64b0441cb5e-cni-binary-copy\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875749 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ng4nx\" (UniqueName: \"kubernetes.io/projected/364060da-3bac-4f3e-b8b8-a64b0441cb5e-kube-api-access-ng4nx\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875764 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-run-netns\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875850 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-host-run-netns\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875920 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/364060da-3bac-4f3e-b8b8-a64b0441cb5e-os-release\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.875960 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-hostroot\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.876100 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"os-release\" (UniqueName: \"kubernetes.io/host-path/1179c900-e866-4c5a-bb06-6032cc03a075-os-release\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.876246 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/364060da-3bac-4f3e-b8b8-a64b0441cb5e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.876397 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/364060da-3bac-4f3e-b8b8-a64b0441cb5e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.876637 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/1179c900-e866-4c5a-bb06-6032cc03a075-multus-daemon-config\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.895910 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.910429 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.926146 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.942125 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.968635 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:43 crc kubenswrapper[4911]: I0929 21:25:43.987880 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.005118 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.022134 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-w647f"] Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.022734 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:44 crc kubenswrapper[4911]: W0929 21:25:44.026406 4911 reflector.go:561] object-"openshift-machine-config-operator"/"proxy-tls": failed to list *v1.Secret: secrets "proxy-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Sep 29 21:25:44 crc kubenswrapper[4911]: W0929 21:25:44.026432 4911 reflector.go:561] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": failed to list *v1.Secret: secrets "machine-config-daemon-dockercfg-r5tcq" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Sep 29 21:25:44 crc kubenswrapper[4911]: W0929 21:25:44.026458 4911 reflector.go:561] object-"openshift-machine-config-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.026489 4911 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"machine-config-daemon-dockercfg-r5tcq\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-config-daemon-dockercfg-r5tcq\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.026448 4911 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"proxy-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"proxy-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.026521 4911 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.027061 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 29 21:25:44 crc kubenswrapper[4911]: W0929 21:25:44.031443 4911 reflector.go:561] object-"openshift-machine-config-operator"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.031540 4911 reflector.go:158] "Unhandled Error" 
err="object-\"openshift-machine-config-operator\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.031482 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9wxd8"] Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.033005 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.037302 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.037638 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.038532 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.043133 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.043255 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.044439 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.047183 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.058079 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078063 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-systemd\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078126 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-node-log\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078146 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-run-ovn-kubernetes\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078264 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" 
(UniqueName: \"kubernetes.io/secret/50640abc-40db-4390-82d1-f3cfc76da71c-proxy-tls\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078348 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnzb7\" (UniqueName: \"kubernetes.io/projected/4e3aa70f-b0da-44c9-a850-96d4494b02fc-kube-api-access-rnzb7\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078377 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/50640abc-40db-4390-82d1-f3cfc76da71c-mcd-auth-proxy-config\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078392 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmgnb\" (UniqueName: \"kubernetes.io/projected/50640abc-40db-4390-82d1-f3cfc76da71c-kube-api-access-zmgnb\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078410 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-systemd-units\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078443 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-slash\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078458 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovnkube-config\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078475 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-kubelet\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078491 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-openvswitch\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 
21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078510 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-cni-netd\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078526 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078546 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-env-overrides\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078563 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-log-socket\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078582 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-cni-bin\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078639 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-ovn\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078677 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovn-node-metrics-cert\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078726 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/50640abc-40db-4390-82d1-f3cfc76da71c-rootfs\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078756 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-var-lib-openvswitch\") pod 
\"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078780 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-etc-openvswitch\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078815 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-run-netns\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.078833 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovnkube-script-lib\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.114942 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.136220 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.149517 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.163019 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.177940 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179155 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-systemd\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179198 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-run-ovn-kubernetes\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc 
kubenswrapper[4911]: I0929 21:25:44.179226 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-node-log\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179252 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/50640abc-40db-4390-82d1-f3cfc76da71c-proxy-tls\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179270 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnzb7\" (UniqueName: \"kubernetes.io/projected/4e3aa70f-b0da-44c9-a850-96d4494b02fc-kube-api-access-rnzb7\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179288 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-systemd\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179287 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-systemd-units\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179353 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-node-log\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179406 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-run-ovn-kubernetes\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179449 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/50640abc-40db-4390-82d1-f3cfc76da71c-mcd-auth-proxy-config\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179470 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmgnb\" (UniqueName: \"kubernetes.io/projected/50640abc-40db-4390-82d1-f3cfc76da71c-kube-api-access-zmgnb\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 
21:25:44.179450 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-systemd-units\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179524 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-slash\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179501 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-slash\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179621 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovnkube-config\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179656 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-kubelet\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179682 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-openvswitch\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179703 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-cni-netd\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179715 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-kubelet\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179734 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179754 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-env-overrides\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179769 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-openvswitch\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179771 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-log-socket\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179816 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-log-socket\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179828 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-cni-bin\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179841 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-cni-netd\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179847 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-ovn\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179864 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovn-node-metrics-cert\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179868 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179883 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/50640abc-40db-4390-82d1-f3cfc76da71c-rootfs\") pod 
\"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179902 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-var-lib-openvswitch\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179918 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-etc-openvswitch\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179933 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-run-netns\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.179948 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovnkube-script-lib\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.180023 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/50640abc-40db-4390-82d1-f3cfc76da71c-rootfs\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.180068 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-cni-bin\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.180097 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-ovn\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.180532 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-env-overrides\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.180589 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-etc-openvswitch\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.180608 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovnkube-config\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.180632 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-run-netns\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.180613 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-var-lib-openvswitch\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.180780 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovnkube-script-lib\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.186354 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovn-node-metrics-cert\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.198996 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.199699 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnzb7\" (UniqueName: \"kubernetes.io/projected/4e3aa70f-b0da-44c9-a850-96d4494b02fc-kube-api-access-rnzb7\") pod \"ovnkube-node-9wxd8\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.211478 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.223826 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.239050 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.249970 4911 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.266100 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.277743 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.291297 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.304961 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.317299 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.327609 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.338866 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.349477 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.356525 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: W0929 21:25:44.365826 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e3aa70f_b0da_44c9_a850_96d4494b02fc.slice/crio-3461e0c266fae127b8592fa27baa4fe782aa65ed4a243a7fd9b25a0e2f8a6755 WatchSource:0}: Error finding container 3461e0c266fae127b8592fa27baa4fe782aa65ed4a243a7fd9b25a0e2f8a6755: Status 404 returned error can't find the container with id 3461e0c266fae127b8592fa27baa4fe782aa65ed4a243a7fd9b25a0e2f8a6755 Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.370129 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.384996 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.406366 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.421603 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.434879 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.449614 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.503273 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.700112 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.700112 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.700314 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.700360 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.859030 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.871294 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmgnb\" (UniqueName: \"kubernetes.io/projected/50640abc-40db-4390-82d1-f3cfc76da71c-kube-api-access-zmgnb\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.876260 4911 configmap.go:193] Couldn't get configMap openshift-multus/cni-copy-resources: failed to sync configmap cache: timed out waiting for the condition Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.876346 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/364060da-3bac-4f3e-b8b8-a64b0441cb5e-cni-binary-copy podName:364060da-3bac-4f3e-b8b8-a64b0441cb5e nodeName:}" failed. No retries permitted until 2025-09-29 21:25:45.376326651 +0000 UTC m=+23.353439332 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cni-binary-copy" (UniqueName: "kubernetes.io/configmap/364060da-3bac-4f3e-b8b8-a64b0441cb5e-cni-binary-copy") pod "multus-additional-cni-plugins-bp485" (UID: "364060da-3bac-4f3e-b8b8-a64b0441cb5e") : failed to sync configmap cache: timed out waiting for the condition Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.876266 4911 configmap.go:193] Couldn't get configMap openshift-multus/cni-copy-resources: failed to sync configmap cache: timed out waiting for the condition Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.876478 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1179c900-e866-4c5a-bb06-6032cc03a075-cni-binary-copy podName:1179c900-e866-4c5a-bb06-6032cc03a075 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:45.376440115 +0000 UTC m=+23.353552786 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cni-binary-copy" (UniqueName: "kubernetes.io/configmap/1179c900-e866-4c5a-bb06-6032cc03a075-cni-binary-copy") pod "multus-lrfbg" (UID: "1179c900-e866-4c5a-bb06-6032cc03a075") : failed to sync configmap cache: timed out waiting for the condition Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.887775 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b" exitCode=0 Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.887851 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b"} Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.887927 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"3461e0c266fae127b8592fa27baa4fe782aa65ed4a243a7fd9b25a0e2f8a6755"} Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.888700 4911 projected.go:288] Couldn't get configMap openshift-multus/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.888763 4911 projected.go:194] Error preparing data for projected volume kube-api-access-jwhfq for pod openshift-multus/multus-lrfbg: failed to sync configmap cache: timed out waiting for the condition Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.888856 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1179c900-e866-4c5a-bb06-6032cc03a075-kube-api-access-jwhfq podName:1179c900-e866-4c5a-bb06-6032cc03a075 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:45.388829753 +0000 UTC m=+23.365942644 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-jwhfq" (UniqueName: "kubernetes.io/projected/1179c900-e866-4c5a-bb06-6032cc03a075-kube-api-access-jwhfq") pod "multus-lrfbg" (UID: "1179c900-e866-4c5a-bb06-6032cc03a075") : failed to sync configmap cache: timed out waiting for the condition Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.889519 4911 projected.go:288] Couldn't get configMap openshift-multus/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.889554 4911 projected.go:194] Error preparing data for projected volume kube-api-access-ng4nx for pod openshift-multus/multus-additional-cni-plugins-bp485: failed to sync configmap cache: timed out waiting for the condition Sep 29 21:25:44 crc kubenswrapper[4911]: E0929 21:25:44.889602 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/364060da-3bac-4f3e-b8b8-a64b0441cb5e-kube-api-access-ng4nx podName:364060da-3bac-4f3e-b8b8-a64b0441cb5e nodeName:}" failed. No retries permitted until 2025-09-29 21:25:45.389588925 +0000 UTC m=+23.366701826 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-ng4nx" (UniqueName: "kubernetes.io/projected/364060da-3bac-4f3e-b8b8-a64b0441cb5e-kube-api-access-ng4nx") pod "multus-additional-cni-plugins-bp485" (UID: "364060da-3bac-4f3e-b8b8-a64b0441cb5e") : failed to sync configmap cache: timed out waiting for the condition Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.889670 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d"} Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.901286 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.903466 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.919349 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.941377 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.963872 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPa
th\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"
192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.965542 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.979004 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:44 crc kubenswrapper[4911]: I0929 21:25:44.993593 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:44Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.010354 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.025165 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.039153 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.054295 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.069635 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.080311 4911 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.102327 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.105860 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete 
status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729
d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: 
Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.125143 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\
"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.147458 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.168578 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.170399 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-dz6zq"] Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.171076 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-dz6zq" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.173112 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.174449 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.174658 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.175234 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.179959 4911 secret.go:188] Couldn't get secret openshift-machine-config-operator/proxy-tls: failed to sync secret cache: timed out waiting for the condition Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.180111 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50640abc-40db-4390-82d1-f3cfc76da71c-proxy-tls podName:50640abc-40db-4390-82d1-f3cfc76da71c nodeName:}" failed. No retries permitted until 2025-09-29 21:25:45.680084887 +0000 UTC m=+23.657197568 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/50640abc-40db-4390-82d1-f3cfc76da71c-proxy-tls") pod "machine-config-daemon-w647f" (UID: "50640abc-40db-4390-82d1-f3cfc76da71c") : failed to sync secret cache: timed out waiting for the condition Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.180469 4911 configmap.go:193] Couldn't get configMap openshift-machine-config-operator/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.180586 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/50640abc-40db-4390-82d1-f3cfc76da71c-mcd-auth-proxy-config podName:50640abc-40db-4390-82d1-f3cfc76da71c nodeName:}" failed. No retries permitted until 2025-09-29 21:25:45.680559382 +0000 UTC m=+23.657672213 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "mcd-auth-proxy-config" (UniqueName: "kubernetes.io/configmap/50640abc-40db-4390-82d1-f3cfc76da71c-mcd-auth-proxy-config") pod "machine-config-daemon-w647f" (UID: "50640abc-40db-4390-82d1-f3cfc76da71c") : failed to sync configmap cache: timed out waiting for the condition Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.200401 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0c16af8d-e647-4820-b96f-298cce113ab1-host\") pod \"node-ca-dz6zq\" (UID: \"0c16af8d-e647-4820-b96f-298cce113ab1\") " pod="openshift-image-registry/node-ca-dz6zq" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.200461 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0c16af8d-e647-4820-b96f-298cce113ab1-serviceca\") pod \"node-ca-dz6zq\" (UID: \"0c16af8d-e647-4820-b96f-298cce113ab1\") " pod="openshift-image-registry/node-ca-dz6zq" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.200510 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbgg2\" (UniqueName: \"kubernetes.io/projected/0c16af8d-e647-4820-b96f-298cce113ab1-kube-api-access-nbgg2\") pod \"node-ca-dz6zq\" (UID: \"0c16af8d-e647-4820-b96f-298cce113ab1\") " pod="openshift-image-registry/node-ca-dz6zq" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.231634 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.246479 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.246891 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.271073 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.287448 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.301673 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0c16af8d-e647-4820-b96f-298cce113ab1-serviceca\") pod \"node-ca-dz6zq\" (UID: \"0c16af8d-e647-4820-b96f-298cce113ab1\") " pod="openshift-image-registry/node-ca-dz6zq" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.301734 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbgg2\" (UniqueName: \"kubernetes.io/projected/0c16af8d-e647-4820-b96f-298cce113ab1-kube-api-access-nbgg2\") pod \"node-ca-dz6zq\" (UID: \"0c16af8d-e647-4820-b96f-298cce113ab1\") " pod="openshift-image-registry/node-ca-dz6zq" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.301946 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0c16af8d-e647-4820-b96f-298cce113ab1-host\") pod \"node-ca-dz6zq\" (UID: \"0c16af8d-e647-4820-b96f-298cce113ab1\") " pod="openshift-image-registry/node-ca-dz6zq" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.302033 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0c16af8d-e647-4820-b96f-298cce113ab1-host\") pod \"node-ca-dz6zq\" (UID: \"0c16af8d-e647-4820-b96f-298cce113ab1\") " pod="openshift-image-registry/node-ca-dz6zq" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.303092 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0c16af8d-e647-4820-b96f-298cce113ab1-serviceca\") pod \"node-ca-dz6zq\" (UID: \"0c16af8d-e647-4820-b96f-298cce113ab1\") " pod="openshift-image-registry/node-ca-dz6zq" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.303960 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.320191 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.324496 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbgg2\" (UniqueName: \"kubernetes.io/projected/0c16af8d-e647-4820-b96f-298cce113ab1-kube-api-access-nbgg2\") pod \"node-ca-dz6zq\" (UID: \"0c16af8d-e647-4820-b96f-298cce113ab1\") " pod="openshift-image-registry/node-ca-dz6zq" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.339333 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.357917 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.376943 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.392538 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 
2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.403374 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1179c900-e866-4c5a-bb06-6032cc03a075-cni-binary-copy\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.403430 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwhfq\" (UniqueName: \"kubernetes.io/projected/1179c900-e866-4c5a-bb06-6032cc03a075-kube-api-access-jwhfq\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.403457 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/364060da-3bac-4f3e-b8b8-a64b0441cb5e-cni-binary-copy\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.403479 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ng4nx\" (UniqueName: \"kubernetes.io/projected/364060da-3bac-4f3e-b8b8-a64b0441cb5e-kube-api-access-ng4nx\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.404535 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1179c900-e866-4c5a-bb06-6032cc03a075-cni-binary-copy\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.404545 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/364060da-3bac-4f3e-b8b8-a64b0441cb5e-cni-binary-copy\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.407221 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ng4nx\" (UniqueName: \"kubernetes.io/projected/364060da-3bac-4f3e-b8b8-a64b0441cb5e-kube-api-access-ng4nx\") pod \"multus-additional-cni-plugins-bp485\" (UID: \"364060da-3bac-4f3e-b8b8-a64b0441cb5e\") " pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.407505 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwhfq\" (UniqueName: \"kubernetes.io/projected/1179c900-e866-4c5a-bb06-6032cc03a075-kube-api-access-jwhfq\") pod \"multus-lrfbg\" (UID: \"1179c900-e866-4c5a-bb06-6032cc03a075\") " pod="openshift-multus/multus-lrfbg" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.409484 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.433585 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPa
th\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"
192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.449236 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.467412 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.469993 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-lrfbg" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.474934 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-bp485" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.479057 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: W0929 21:25:45.483815 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1179c900_e866_4c5a_bb06_6032cc03a075.slice/crio-19c4093859ba4d2f1086b8328542a2fa67f8f48d36fa94ffd7b964297036149f WatchSource:0}: Error finding container 19c4093859ba4d2f1086b8328542a2fa67f8f48d36fa94ffd7b964297036149f: Status 404 returned error can't find the container with id 19c4093859ba4d2f1086b8328542a2fa67f8f48d36fa94ffd7b964297036149f Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.498785 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.516456 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.548621 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.576235 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-dz6zq" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.589653 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.601318 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.606526 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.606981 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:25:49.606960695 +0000 UTC m=+27.584073366 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.681380 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.701044 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.701219 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.713225 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.713270 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.713293 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.713315 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/50640abc-40db-4390-82d1-f3cfc76da71c-proxy-tls\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.713332 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/50640abc-40db-4390-82d1-f3cfc76da71c-mcd-auth-proxy-config\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.713349 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.713433 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.713462 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.713488 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:49.713474051 +0000 UTC m=+27.690586722 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.713541 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:49.713520822 +0000 UTC m=+27.690633513 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.713575 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.713587 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.713598 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.713620 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:49.713613445 +0000 UTC m=+27.690726116 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.713665 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.713674 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.713680 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:45 crc kubenswrapper[4911]: E0929 21:25:45.713699 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:49.713693397 +0000 UTC m=+27.690806068 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.714607 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/50640abc-40db-4390-82d1-f3cfc76da71c-mcd-auth-proxy-config\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.721913 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.733602 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/50640abc-40db-4390-82d1-f3cfc76da71c-proxy-tls\") pod \"machine-config-daemon-w647f\" (UID: \"50640abc-40db-4390-82d1-f3cfc76da71c\") " pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.758664 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.759969 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.773608 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.777904 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.787951 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.833327 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.837506 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:25:45 crc kubenswrapper[4911]: W0929 21:25:45.848917 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50640abc_40db_4390_82d1_f3cfc76da71c.slice/crio-36a558fabe693fa90370261d623f3c9f86d0bae0066070acee1d776d87ae7bd6 WatchSource:0}: Error finding container 36a558fabe693fa90370261d623f3c9f86d0bae0066070acee1d776d87ae7bd6: Status 404 returned error can't find the container with id 36a558fabe693fa90370261d623f3c9f86d0bae0066070acee1d776d87ae7bd6 Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.877626 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri
-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a
0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.904496 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.904554 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.904568 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.904580 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" 
event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.904593 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.904607 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.907046 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-dz6zq" event={"ID":"0c16af8d-e647-4820-b96f-298cce113ab1","Type":"ContainerStarted","Data":"e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.907081 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-dz6zq" event={"ID":"0c16af8d-e647-4820-b96f-298cce113ab1","Type":"ContainerStarted","Data":"6ee3d93899add3878d5d748639ef42e80226d2c9ef9d0fc5eb2a41c481c044a5"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.910158 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" event={"ID":"364060da-3bac-4f3e-b8b8-a64b0441cb5e","Type":"ContainerStarted","Data":"7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.910229 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" event={"ID":"364060da-3bac-4f3e-b8b8-a64b0441cb5e","Type":"ContainerStarted","Data":"95eb0f9dd3bdf52cb613d3619def1df9a8775286b79e216e1a910b76e29fd831"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.911465 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.912919 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lrfbg" event={"ID":"1179c900-e866-4c5a-bb06-6032cc03a075","Type":"ContainerStarted","Data":"8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.912951 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lrfbg" event={"ID":"1179c900-e866-4c5a-bb06-6032cc03a075","Type":"ContainerStarted","Data":"19c4093859ba4d2f1086b8328542a2fa67f8f48d36fa94ffd7b964297036149f"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.914275 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"36a558fabe693fa90370261d623f3c9f86d0bae0066070acee1d776d87ae7bd6"} Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.949337 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:45 crc kubenswrapper[4911]: I0929 21:25:45.991944 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T21:25:45Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.032896 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.071651 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.116190 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.159740 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPa
th\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"
192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.190081 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.236757 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.272948 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.313091 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.357632 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.392213 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.429024 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.472761 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.529324 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.553164 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.588241 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.633542 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.668561 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.700479 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.700550 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:46 crc kubenswrapper[4911]: E0929 21:25:46.700671 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:25:46 crc kubenswrapper[4911]: E0929 21:25:46.700977 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.715347 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.755361 4911 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.797148 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrid
es\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.840169 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.877629 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.910418 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.918854 4911 generic.go:334] "Generic (PLEG): container finished" podID="364060da-3bac-4f3e-b8b8-a64b0441cb5e" containerID="7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38" exitCode=0 Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.918957 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" event={"ID":"364060da-3bac-4f3e-b8b8-a64b0441cb5e","Type":"ContainerDied","Data":"7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38"} Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.923234 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b"} Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.923293 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca"} Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.950384 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:46 crc kubenswrapper[4911]: I0929 21:25:46.990048 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:46Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.030113 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.075920 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.120172 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.148332 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.192808 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.226569 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime
\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.270802 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\
\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.313866 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"
cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.350996 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.392977 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.434047 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.476227 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.511740 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.554745 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z 
is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.592750 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.627703 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.700530 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:47 crc kubenswrapper[4911]: E0929 21:25:47.700707 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.931190 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3"} Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.933947 4911 generic.go:334] "Generic (PLEG): container finished" podID="364060da-3bac-4f3e-b8b8-a64b0441cb5e" containerID="3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205" exitCode=0 Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.934021 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" event={"ID":"364060da-3bac-4f3e-b8b8-a64b0441cb5e","Type":"ContainerDied","Data":"3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205"} Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.959201 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.980446 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:47 crc kubenswrapper[4911]: I0929 21:25:47.999706 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:47Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.017878 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.033246 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.053462 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z 
is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.068542 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.082518 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.093887 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.111753 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.124545 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.141095 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.153582 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.197128 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\
\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d
0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.229230 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.253404 4911 kubelet_node_status.go:401] "Setting 
node annotation to enable volume controller attach/detach" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.256397 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.256453 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.256465 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.256622 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.263113 4911 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.263441 4911 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.264662 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.264718 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.264732 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.264756 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.264772 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:48Z","lastTransitionTime":"2025-09-29T21:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:48 crc kubenswrapper[4911]: E0929 21:25:48.283031 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.292194 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.292249 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.292260 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.292282 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.292299 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:48Z","lastTransitionTime":"2025-09-29T21:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:48 crc kubenswrapper[4911]: E0929 21:25:48.310824 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.316649 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.316688 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.316702 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.316727 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.316741 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:48Z","lastTransitionTime":"2025-09-29T21:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:48 crc kubenswrapper[4911]: E0929 21:25:48.331840 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.336241 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.336293 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.336305 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.336322 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.336333 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:48Z","lastTransitionTime":"2025-09-29T21:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:48 crc kubenswrapper[4911]: E0929 21:25:48.349521 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.353394 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.353452 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.353463 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.353483 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.353495 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:48Z","lastTransitionTime":"2025-09-29T21:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:48 crc kubenswrapper[4911]: E0929 21:25:48.365348 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: E0929 21:25:48.365460 4911 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.367236 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.367266 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.367277 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.367291 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.367302 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:48Z","lastTransitionTime":"2025-09-29T21:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.469727 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.469847 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.469876 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.469910 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.469935 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:48Z","lastTransitionTime":"2025-09-29T21:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.572946 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.573022 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.573052 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.573091 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.573116 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:48Z","lastTransitionTime":"2025-09-29T21:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.675919 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.675972 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.675981 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.676000 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.676011 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:48Z","lastTransitionTime":"2025-09-29T21:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.700561 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:48 crc kubenswrapper[4911]: E0929 21:25:48.700716 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.700564 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:48 crc kubenswrapper[4911]: E0929 21:25:48.700835 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.778480 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.778536 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.778545 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.778564 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.778577 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:48Z","lastTransitionTime":"2025-09-29T21:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.884058 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.884104 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.884120 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.884139 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.884151 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:48Z","lastTransitionTime":"2025-09-29T21:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.943814 4911 generic.go:334] "Generic (PLEG): container finished" podID="364060da-3bac-4f3e-b8b8-a64b0441cb5e" containerID="4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91" exitCode=0 Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.943870 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" event={"ID":"364060da-3bac-4f3e-b8b8-a64b0441cb5e","Type":"ContainerDied","Data":"4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91"} Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.965910 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z 
is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.981542 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.988514 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.988566 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.988575 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.988595 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.988606 4911 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:48Z","lastTransitionTime":"2025-09-29T21:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:48 crc kubenswrapper[4911]: I0929 21:25:48.995375 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:48Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.010976 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.023610 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.036962 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.049964 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.063969 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.075918 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.093054 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:49 
crc kubenswrapper[4911]: I0929 21:25:49.093479 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.093491 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.093510 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.093523 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:49Z","lastTransitionTime":"2025-09-29T21:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.098057 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\
\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53
302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.114273 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.125661 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.141007 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.153440 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.1
68.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.166897 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.195975 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.196021 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.196030 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.196049 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.196061 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:49Z","lastTransitionTime":"2025-09-29T21:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.298917 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.298963 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.298975 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.298991 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.299002 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:49Z","lastTransitionTime":"2025-09-29T21:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
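Every "Failed to update status for pod" record above has the same root cause: the pod.network-node-identity.openshift.io admission webhook at 127.0.0.1:9743 is serving a certificate whose NotAfter (2025-08-24T17:21:41Z) is more than a month behind the node clock (2025-09-29T21:25:49Z), so the kubelet's status_manager gets every status patch rejected and re-logs the rejection per pod. A minimal diagnostic sketch, assuming only the endpoint taken from the failing Post URL above; InsecureSkipVerify is used solely so the handshake completes and the expired leaf certificate can be inspected:

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Handshake with the webhook listener named in the failing Post URL above.
	// InsecureSkipVerify disables chain/validity checks so an expired cert can still be read.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	leaf := certs[0]
	now := time.Now().UTC()
	fmt.Println("NotBefore:", leaf.NotBefore.UTC().Format(time.RFC3339))
	fmt.Println("NotAfter: ", leaf.NotAfter.UTC().Format(time.RFC3339))
	fmt.Println("Now:      ", now.Format(time.RFC3339))
	if now.After(leaf.NotAfter) {
		// Matches the kubelet error: "certificate has expired or is not yet valid".
		fmt.Println("serving certificate is expired")
	}
}

Run on the node, this should print the same validity window the error message quotes; the retries stop only once the webhook's serving certificate is rotated.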
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.401495 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.401549 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.401559 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.401577 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.401589 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:49Z","lastTransitionTime":"2025-09-29T21:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.476950 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.492088 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.504103 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.504144 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.504154 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.504172 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.504183 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:49Z","lastTransitionTime":"2025-09-29T21:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.505633 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.520592 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.531089 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.544883 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.557691 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.1
68.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.577071 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.588303 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.601541 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb40
8d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.607727 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.607869 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.607942 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.608023 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.608086 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:49Z","lastTransitionTime":"2025-09-29T21:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
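The repeating NodeNotReady condition above is independent of the webhook rejections: the container runtime keeps reporting NetworkReady=false because nothing has written a CNI network config to /etc/kubernetes/cni/net.d/ yet, consistent with the multus init containers earlier in the log (routeoverride-cni, whereabouts-cni-bincopy, whereabouts-cni) still sitting in PodInitializing. A minimal sketch of the same readiness check, assuming the directory from the message above and the .conf/.conflist/.json extensions that CNI config loaders conventionally accept:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Directory taken verbatim from the kubelet message above; extensions are an
	// assumption based on common CNI loader behavior, not read from this log.
	dir := "/etc/kubernetes/cni/net.d"
	var found []string
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(dir, pat))
		if err != nil {
			fmt.Println("glob failed:", err)
			return
		}
		found = append(found, matches...)
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file found; runtime will keep reporting NetworkReady=false")
		return
	}
	for _, f := range found {
		fmt.Println("found CNI config:", f)
	}
}

Once the network plugin finishes initializing and drops a config file there, the runtime should report NetworkReady=true and the Ready condition can clear.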
Has your network provider started?"} Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.613089 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.624705 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.638040 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.652249 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\"
,\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.659351 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.659711 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:25:57.659666627 +0000 UTC m=+35.636779338 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.669493 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z 
is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.682714 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.701098 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.701304 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.710883 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.710956 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.710975 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.711002 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.711021 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:49Z","lastTransitionTime":"2025-09-29T21:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.786847 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.786924 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.786965 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.786993 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.787033 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.787071 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:25:49 crc 
kubenswrapper[4911]: E0929 21:25:49.787084 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.787157 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.787183 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.787199 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.787281 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.787358 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.787168 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:57.787137422 +0000 UTC m=+35.764250293 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.787416 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:57.78739341 +0000 UTC m=+35.764506301 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.787431 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-09-29 21:25:57.78742389 +0000 UTC m=+35.764536821 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:25:49 crc kubenswrapper[4911]: E0929 21:25:49.787446 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:25:57.787437891 +0000 UTC m=+35.764550802 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.813310 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.813362 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.813374 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.813398 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.813412 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:49Z","lastTransitionTime":"2025-09-29T21:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.916973 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.917267 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.917380 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.917706 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.917894 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:49Z","lastTransitionTime":"2025-09-29T21:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.950778 4911 generic.go:334] "Generic (PLEG): container finished" podID="364060da-3bac-4f3e-b8b8-a64b0441cb5e" containerID="e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d" exitCode=0 Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.950838 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" event={"ID":"364060da-3bac-4f3e-b8b8-a64b0441cb5e","Type":"ContainerDied","Data":"e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d"} Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.970276 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:49 crc kubenswrapper[4911]: I0929 21:25:49.984918 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:49Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.018443 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.020894 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.020932 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.020945 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.020963 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.020976 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:50Z","lastTransitionTime":"2025-09-29T21:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.032130 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.041611 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.055878 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.067911 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.080907 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.106105 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.122520 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7
f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.124634 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.124667 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.124678 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.124694 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.124706 4911 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:50Z","lastTransitionTime":"2025-09-29T21:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.137530 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.150337 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.164457 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.178974 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.193967 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.227500 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.227564 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.227579 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.227600 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.227613 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:50Z","lastTransitionTime":"2025-09-29T21:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.330538 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.330590 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.330603 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.330627 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.330641 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:50Z","lastTransitionTime":"2025-09-29T21:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.433710 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.433753 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.433764 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.433780 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.433811 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:50Z","lastTransitionTime":"2025-09-29T21:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.538467 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.538697 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.538708 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.538730 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.538742 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:50Z","lastTransitionTime":"2025-09-29T21:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.641513 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.641559 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.641571 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.641590 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.641601 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:50Z","lastTransitionTime":"2025-09-29T21:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.699989 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.700062 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:50 crc kubenswrapper[4911]: E0929 21:25:50.700141 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:25:50 crc kubenswrapper[4911]: E0929 21:25:50.700667 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.744645 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.744698 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.744709 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.744725 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.744738 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:50Z","lastTransitionTime":"2025-09-29T21:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.848682 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.848734 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.848745 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.848765 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.848778 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:50Z","lastTransitionTime":"2025-09-29T21:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.951149 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.951190 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.951203 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.951220 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.951234 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:50Z","lastTransitionTime":"2025-09-29T21:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.963376 4911 generic.go:334] "Generic (PLEG): container finished" podID="364060da-3bac-4f3e-b8b8-a64b0441cb5e" containerID="c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b" exitCode=0 Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.963463 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" event={"ID":"364060da-3bac-4f3e-b8b8-a64b0441cb5e","Type":"ContainerDied","Data":"c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b"} Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.975515 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"412f7b8119d990462b71c2e7168c9940ca770637f085b4767c75af182ecba2de"} Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.976630 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.976739 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.981485 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:50 crc kubenswrapper[4911]: I0929 21:25:50.996308 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\
",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.015399 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z 
is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.019767 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.025324 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.035678 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\
\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.051326 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.054011 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.054035 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.054043 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.054058 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.054070 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:51Z","lastTransitionTime":"2025-09-29T21:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.066095 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.079885 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.093911 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.131144 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.156175 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.156229 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.156239 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.156261 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.156274 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:51Z","lastTransitionTime":"2025-09-29T21:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.160245 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.187150 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.211322 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.221081 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.239852 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.255280 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.259610 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.259657 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.259669 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.259692 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.259720 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:51Z","lastTransitionTime":"2025-09-29T21:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.265455 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.280394 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.292411 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.313496 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://412f7b8119d990462b71c2e7168c9940ca770637f085b4767c75af182ecba2de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/va
r/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.329235 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.342994 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.354173 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.361546 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.361576 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.361585 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.361683 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.361728 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:51Z","lastTransitionTime":"2025-09-29T21:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.366732 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.377859 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.388936 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.398159 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.416912 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.430767 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.443207 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.461874 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.463907 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.463973 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.463986 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.464016 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.464030 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:51Z","lastTransitionTime":"2025-09-29T21:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.567599 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.567650 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.567663 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.567732 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.567771 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:51Z","lastTransitionTime":"2025-09-29T21:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.670953 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.671013 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.671042 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.671066 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.671084 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:51Z","lastTransitionTime":"2025-09-29T21:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.700291 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:51 crc kubenswrapper[4911]: E0929 21:25:51.700441 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.773540 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.773630 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.773644 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.773678 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.773689 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:51Z","lastTransitionTime":"2025-09-29T21:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.875744 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.875813 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.875825 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.875843 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.875856 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:51Z","lastTransitionTime":"2025-09-29T21:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.978306 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.978355 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.978366 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.978383 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.978393 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:51Z","lastTransitionTime":"2025-09-29T21:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.982518 4911 generic.go:334] "Generic (PLEG): container finished" podID="364060da-3bac-4f3e-b8b8-a64b0441cb5e" containerID="f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c" exitCode=0 Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.982569 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" event={"ID":"364060da-3bac-4f3e-b8b8-a64b0441cb5e","Type":"ContainerDied","Data":"f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c"} Sep 29 21:25:51 crc kubenswrapper[4911]: I0929 21:25:51.982689 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.000251 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:51Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.017780 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.034094 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.052951 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa
953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://412f7b8119d990462b71c2e7168c9940ca770637f085b4767c75af182ecba2de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.068332 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-
cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.081399 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z"
Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.087813 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.087861 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.087872 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.087891 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.087902 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:52Z","lastTransitionTime":"2025-09-29T21:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.093849 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.105781 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.118372 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.129774 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.140098 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.164063 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.179844 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.191337 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.191392 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.191407 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.191431 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.191450 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:52Z","lastTransitionTime":"2025-09-29T21:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.195600 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"ho
stIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.220004 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.294891 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.295328 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.295351 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.295372 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.295384 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:52Z","lastTransitionTime":"2025-09-29T21:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.398676 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.398737 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.398750 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.398772 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.398807 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:52Z","lastTransitionTime":"2025-09-29T21:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.502272 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.502326 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.502340 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.502360 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.502376 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:52Z","lastTransitionTime":"2025-09-29T21:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.605707 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.605761 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.605773 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.605808 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.605819 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:52Z","lastTransitionTime":"2025-09-29T21:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.700866 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.700913 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:52 crc kubenswrapper[4911]: E0929 21:25:52.701054 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:25:52 crc kubenswrapper[4911]: E0929 21:25:52.701155 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.709068 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.709127 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.709141 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.709160 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.709175 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:52Z","lastTransitionTime":"2025-09-29T21:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.721403 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.755128 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.775479 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.791286 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.808054 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.812033 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.812095 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:52 crc 
kubenswrapper[4911]: I0929 21:25:52.812114 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.812179 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.812201 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:52Z","lastTransitionTime":"2025-09-29T21:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.826651 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.842382 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPa
th\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.863547 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/n
et.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.893591 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://412f7b8119d990462b71c2e7168c9940ca770637
f085b4767c75af182ecba2de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.913873 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.914857 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.914914 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.914923 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.914940 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.914976 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:52Z","lastTransitionTime":"2025-09-29T21:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.927559 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.940621 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.959002 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.976699 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.990429 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 21:25:52.997191 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:52 crc kubenswrapper[4911]: I0929 
21:25:52.997276 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" event={"ID":"364060da-3bac-4f3e-b8b8-a64b0441cb5e","Type":"ContainerStarted","Data":"7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e"} Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.014375 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}
\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.017656 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.017682 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.017694 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.017714 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.017730 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:53Z","lastTransitionTime":"2025-09-29T21:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.045702 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://412f7b8119d990462b71c2e7168c9940ca770637
f085b4767c75af182ecba2de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.061000 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.075260 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.092681 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.109226 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.121777 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.121841 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.121852 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.121871 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.121909 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:53Z","lastTransitionTime":"2025-09-29T21:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.126328 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.146389 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.167404 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.185093 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.208781 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.224860 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.224911 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.224926 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.224947 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.224964 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:53Z","lastTransitionTime":"2025-09-29T21:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.226034 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.240982 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.261097 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.275207 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.329390 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.329455 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.329475 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.329501 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.329520 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:53Z","lastTransitionTime":"2025-09-29T21:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.432619 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.432669 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.432683 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.432705 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.432720 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:53Z","lastTransitionTime":"2025-09-29T21:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.535715 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.535815 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.535832 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.535856 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.535869 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:53Z","lastTransitionTime":"2025-09-29T21:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.641117 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.641180 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.641195 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.641214 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.641225 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:53Z","lastTransitionTime":"2025-09-29T21:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.700509 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:25:53 crc kubenswrapper[4911]: E0929 21:25:53.700692 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.744437 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.744487 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.744500 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.744520 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.744535 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:53Z","lastTransitionTime":"2025-09-29T21:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.847035 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.847086 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.847099 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.847118 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.847134 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:53Z","lastTransitionTime":"2025-09-29T21:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.950639 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.950694 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.950707 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.950727 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.950738 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:53Z","lastTransitionTime":"2025-09-29T21:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:53 crc kubenswrapper[4911]: I0929 21:25:53.998733 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/0.log" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.002213 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="412f7b8119d990462b71c2e7168c9940ca770637f085b4767c75af182ecba2de" exitCode=1 Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.002276 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"412f7b8119d990462b71c2e7168c9940ca770637f085b4767c75af182ecba2de"} Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.003154 4911 scope.go:117] "RemoveContainer" containerID="412f7b8119d990462b71c2e7168c9940ca770637f085b4767c75af182ecba2de" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.037517 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://412f7b8119d990462b71c2e7168c9940ca770637
f085b4767c75af182ecba2de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://412f7b8119d990462b71c2e7168c9940ca770637f085b4767c75af182ecba2de\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:53Z\\\",\\\"message\\\":\\\"985843 6187 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 21:25:52.986061 6187 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 21:25:52.986601 6187 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 21:25:52.986648 6187 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 21:25:52.986660 6187 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 21:25:52.986680 6187 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:25:52.986694 6187 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:25:52.986696 6187 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 21:25:52.986731 6187 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:25:52.986738 6187 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:25:52.986756 6187 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:25:52.986815 6187 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 21:25:52.986830 6187 factory.go:656] Stopping watch factory\\\\nI0929 21:25:52.986900 6187 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:25:52.986842 6187 handler.go:208] Removed *v1.EgressIP event handler 
8\\\\nI0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef
0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.053489 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.053947 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.053961 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.053982 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.053996 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:54Z","lastTransitionTime":"2025-09-29T21:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.061220 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.080587 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.108173 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.124434 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.145602 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.157065 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.159957 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.160018 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.160046 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.160084 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:54Z","lastTransitionTime":"2025-09-29T21:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.161946 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.175011 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.187874 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.215357 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.239209 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.254167 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.262563 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.262622 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.262641 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.262664 4911 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.262679 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:54Z","lastTransitionTime":"2025-09-29T21:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.271109 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed8145
1ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTim
e\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.283439 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.296322 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:54Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.367378 4911 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.367443 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.367459 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.367487 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.367508 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:54Z","lastTransitionTime":"2025-09-29T21:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.470649 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.470738 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.470759 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.470822 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.470847 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:54Z","lastTransitionTime":"2025-09-29T21:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.574962 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.575054 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.575086 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.575131 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.575159 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:54Z","lastTransitionTime":"2025-09-29T21:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.678602 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.678674 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.678701 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.678731 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.678751 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:54Z","lastTransitionTime":"2025-09-29T21:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.701072 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.701100 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:54 crc kubenswrapper[4911]: E0929 21:25:54.701305 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:25:54 crc kubenswrapper[4911]: E0929 21:25:54.701493 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.782730 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.782776 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.782803 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.782821 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.782833 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:54Z","lastTransitionTime":"2025-09-29T21:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.885781 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.885840 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.885850 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.885868 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.885881 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:54Z","lastTransitionTime":"2025-09-29T21:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.989228 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.989530 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.989619 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.989685 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:54 crc kubenswrapper[4911]: I0929 21:25:54.989804 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:54Z","lastTransitionTime":"2025-09-29T21:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.008909 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/0.log" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.012946 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14"} Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.013183 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.030135 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.044104 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.056626 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.076224 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.092830 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.093161 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.093209 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.093219 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.093237 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.093250 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:55Z","lastTransitionTime":"2025-09-29T21:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.121000 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.138501 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.151860 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.166432 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.184825 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.196599 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.196737 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.196833 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.196902 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.196986 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:55Z","lastTransitionTime":"2025-09-29T21:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.202227 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.219891 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.241975 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa
953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://412f7b8119d990462b71c2e7168c9940ca770637f085b4767c75af182ecba2de\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:53Z\\\",\\\"message\\\":\\\"985843 6187 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 21:25:52.986061 6187 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 21:25:52.986601 6187 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 21:25:52.986648 6187 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 21:25:52.986660 6187 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 21:25:52.986680 6187 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:25:52.986694 6187 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:25:52.986696 6187 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 21:25:52.986731 6187 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:25:52.986738 6187 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:25:52.986756 6187 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:25:52.986815 6187 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 21:25:52.986830 6187 factory.go:656] Stopping watch factory\\\\nI0929 21:25:52.986900 6187 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:25:52.986842 6187 handler.go:208] Removed *v1.EgressIP event handler 
8\\\\nI0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\
\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.260569 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.276000 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.300104 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.300164 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.300184 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.300213 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.300234 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:55Z","lastTransitionTime":"2025-09-29T21:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:25:55 crc kubenswrapper[4911]: I0929 21:25:55.701032 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:25:55 crc kubenswrapper[4911]: E0929 21:25:55.701222 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
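The recurring KubeletNotReady condition above comes from the kubelet's network-readiness check: no CNI network config has been written into /etc/kubernetes/cni/net.d/ yet. A minimal diagnostic sketch in Go, assuming it is run directly on the node; it applies the file-extension convention (.conf, .conflist, .json) that libcni uses when scanning a config directory:

// cnicheck.go - list candidate CNI network configs the way libcni would.
// Assumption: run on the node; the directory below is the one named in
// the kubelet errors above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Printf("cannot read %s: %v (matches NetworkPluginNotReady)\n", dir, err)
		return
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni considers
			fmt.Println("candidate CNI config:", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration file found; kubelet will report NetworkReady=false")
	}
}

An empty result here is consistent with the errors above: OVN-Kubernetes (via multus) owns writing that config, and its ovnkube-controller container is crash-looping.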
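The CrashLoopBackOff entry just below ("back-off 10s restarting failed container=ovnkube-controller") reflects the kubelet's per-container restart backoff. A rough sketch of that policy, assuming a 10s initial delay (the figure in the message below) doubling per crash up to a cap of 5m; the doubling-with-cap shape is the usual kubelet behavior, not quoted from its source here, and the real logic also resets the delay after a container runs cleanly long enough:

// backoff.go - sketch of the doubling restart delay behind CrashLoopBackOff.
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 10 * time.Second     // assumed initial delay, per the log message
	maxDelay := 5 * time.Minute   // assumed cap
	for restart := 1; restart <= 7; restart++ {
		fmt.Printf("crash %d: next start delayed %s\n", restart, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}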
Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.019599 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/1.log"
Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.021919 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/0.log"
Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.027716 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14" exitCode=1
Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.027852 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14"}
Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.028221 4911 scope.go:117] "RemoveContainer" containerID="412f7b8119d990462b71c2e7168c9940ca770637f085b4767c75af182ecba2de"
Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.029383 4911 scope.go:117] "RemoveContainer" containerID="e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14"
Sep 29 21:25:56 crc kubenswrapper[4911]: E0929 21:25:56.029658 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc"
Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.050387 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.073400 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.097587 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.125149 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa
953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://412f7b8119d990462b71c2e7168c9940ca770637f085b4767c75af182ecba2de\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:53Z\\\",\\\"message\\\":\\\"985843 6187 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 21:25:52.986061 6187 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 21:25:52.986601 6187 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 21:25:52.986648 6187 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 21:25:52.986660 6187 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 21:25:52.986680 6187 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:25:52.986694 6187 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:25:52.986696 6187 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 21:25:52.986731 6187 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:25:52.986738 6187 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:25:52.986756 6187 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:25:52.986815 6187 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 21:25:52.986830 6187 factory.go:656] Stopping watch factory\\\\nI0929 21:25:52.986900 6187 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:25:52.986842 6187 handler.go:208] Removed *v1.EgressIP event handler 
8\\\\nI0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:55Z\\\",\\\"message\\\":\\\"troller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z]\\\\nI0929 21:25:55.105695 6368 services_controller.go:453] Built service openshift-service-ca-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 21:25:55.105686 6368 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"54fbe873-7e6d-475f-a0ad-8dd5f06d850d\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", 
UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z"
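Every status patch in this log fails against the webhook at https://127.0.0.1:9743 with the same x509 error: the serving certificate expired on 2025-08-24T17:21:41Z, well before the current time in the messages. A quick way to confirm the certificate's validity window from the node; a sketch only, with TLS verification deliberately skipped because the point is to read a certificate that verification would reject, not to trust it:

// certcheck.go - print the validity window of the webhook serving cert.
// Assumption: run on the node; 127.0.0.1:9743 is the network-node-identity
// webhook endpoint named in the errors above.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%v\n",
			cert.Subject, cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339), time.Now().After(cert.NotAfter))
	}
}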
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.191584 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.206663 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.226259 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.230577 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.232174 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.232287 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.232392 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.232473 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.232553 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:56Z","lastTransitionTime":"2025-09-29T21:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.242199 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.258322 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.289753 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.309884 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.324787 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.338195 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.338253 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.338273 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.338301 4911 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.338320 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:56Z","lastTransitionTime":"2025-09-29T21:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.346647 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed8145
1ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTim
e\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.441735 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.441849 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.441873 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.441904 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.441928 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:56Z","lastTransitionTime":"2025-09-29T21:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.510673 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk"] Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.511607 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.514195 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.515854 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.534303 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.544704 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.544743 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.544753 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.544771 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.544784 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:56Z","lastTransitionTime":"2025-09-29T21:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.554525 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.575560 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.580167 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b5b94596-b945-4a89-b362-ec649e8e7981-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-cfbgk\" (UID: \"b5b94596-b945-4a89-b362-ec649e8e7981\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.580258 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b5b94596-b945-4a89-b362-ec649e8e7981-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-cfbgk\" (UID: \"b5b94596-b945-4a89-b362-ec649e8e7981\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.580365 4911 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm7xf\" (UniqueName: \"kubernetes.io/projected/b5b94596-b945-4a89-b362-ec649e8e7981-kube-api-access-fm7xf\") pod \"ovnkube-control-plane-749d76644c-cfbgk\" (UID: \"b5b94596-b945-4a89-b362-ec649e8e7981\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.580469 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b5b94596-b945-4a89-b362-ec649e8e7981-env-overrides\") pod \"ovnkube-control-plane-749d76644c-cfbgk\" (UID: \"b5b94596-b945-4a89-b362-ec649e8e7981\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.595077 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.614063 4911 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c
857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-
release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.626979 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.647599 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.647647 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.647657 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.647673 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.647684 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:56Z","lastTransitionTime":"2025-09-29T21:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.647656 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.663044 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.676417 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.681920 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b5b94596-b945-4a89-b362-ec649e8e7981-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-cfbgk\" (UID: \"b5b94596-b945-4a89-b362-ec649e8e7981\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.681970 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b5b94596-b945-4a89-b362-ec649e8e7981-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-cfbgk\" (UID: \"b5b94596-b945-4a89-b362-ec649e8e7981\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.682042 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm7xf\" (UniqueName: \"kubernetes.io/projected/b5b94596-b945-4a89-b362-ec649e8e7981-kube-api-access-fm7xf\") pod \"ovnkube-control-plane-749d76644c-cfbgk\" (UID: \"b5b94596-b945-4a89-b362-ec649e8e7981\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.682099 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b5b94596-b945-4a89-b362-ec649e8e7981-env-overrides\") pod \"ovnkube-control-plane-749d76644c-cfbgk\" (UID: \"b5b94596-b945-4a89-b362-ec649e8e7981\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.682778 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b5b94596-b945-4a89-b362-ec649e8e7981-env-overrides\") pod \"ovnkube-control-plane-749d76644c-cfbgk\" (UID: \"b5b94596-b945-4a89-b362-ec649e8e7981\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.683761 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b5b94596-b945-4a89-b362-ec649e8e7981-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-cfbgk\" (UID: \"b5b94596-b945-4a89-b362-ec649e8e7981\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.695416 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b5b94596-b945-4a89-b362-ec649e8e7981-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-cfbgk\" (UID: \"b5b94596-b945-4a89-b362-ec649e8e7981\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.696451 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.700479 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.700487 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:56 crc kubenswrapper[4911]: E0929 21:25:56.700677 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:25:56 crc kubenswrapper[4911]: E0929 21:25:56.700914 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.707386 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm7xf\" (UniqueName: \"kubernetes.io/projected/b5b94596-b945-4a89-b362-ec649e8e7981-kube-api-access-fm7xf\") pod \"ovnkube-control-plane-749d76644c-cfbgk\" (UID: \"b5b94596-b945-4a89-b362-ec649e8e7981\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.719635 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.737941 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.750625 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.750674 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.750687 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.750709 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.750724 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:56Z","lastTransitionTime":"2025-09-29T21:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.754537 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.778865 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://412f7b8119d990462b71c2e7168c9940ca770637f085b4767c75af182ecba2de\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:53Z\\\",\\\"message\\\":\\\"985843 6187 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 21:25:52.986061 6187 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 21:25:52.986601 6187 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 21:25:52.986648 6187 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 21:25:52.986660 6187 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 21:25:52.986680 6187 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:25:52.986694 6187 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:25:52.986696 6187 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 21:25:52.986731 6187 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:25:52.986738 6187 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:25:52.986756 6187 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:25:52.986815 6187 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 21:25:52.986830 6187 factory.go:656] Stopping watch factory\\\\nI0929 21:25:52.986900 6187 ovnkube.go:599] Stopped 
ovnkube\\\\nI0929 21:25:52.986842 6187 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:55Z\\\",\\\"message\\\":\\\"troller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z]\\\\nI0929 21:25:55.105695 6368 services_controller.go:453] Built service openshift-service-ca-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 21:25:55.105686 6368 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"54fbe873-7e6d-475f-a0ad-8dd5f06d850d\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", 
UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.798273 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-clus
ter-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.817616 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:56Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.837749 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.853777 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.853848 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.853863 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.853885 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.853898 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:56Z","lastTransitionTime":"2025-09-29T21:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:56 crc kubenswrapper[4911]: W0929 21:25:56.862695 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5b94596_b945_4a89_b362_ec649e8e7981.slice/crio-f560565dd0f1c5ed4031aecb481b318b3ed7799ab8084a10dc98bafa60327654 WatchSource:0}: Error finding container f560565dd0f1c5ed4031aecb481b318b3ed7799ab8084a10dc98bafa60327654: Status 404 returned error can't find the container with id f560565dd0f1c5ed4031aecb481b318b3ed7799ab8084a10dc98bafa60327654 Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.958007 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.958071 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.958082 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.958105 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:56 crc kubenswrapper[4911]: I0929 21:25:56.958116 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:56Z","lastTransitionTime":"2025-09-29T21:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.043169 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/1.log" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.048623 4911 scope.go:117] "RemoveContainer" containerID="e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14" Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.048935 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.059129 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" event={"ID":"b5b94596-b945-4a89-b362-ec649e8e7981","Type":"ContainerStarted","Data":"f560565dd0f1c5ed4031aecb481b318b3ed7799ab8084a10dc98bafa60327654"} Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.060632 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.060723 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.060841 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.060874 4911 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.060940 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:57Z","lastTransitionTime":"2025-09-29T21:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.070485 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.084770 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.099646 4911 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.123739 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.142384 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.153724 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.165169 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.165247 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.165264 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.165287 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.165331 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:57Z","lastTransitionTime":"2025-09-29T21:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.171404 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Compl
eted\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\
\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"p
odIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.185460 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.201228 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.219243 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.250174 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cer
t\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:55Z\\\",\\\"message\\\":\\\"troller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z]\\\\nI0929 21:25:55.105695 6368 services_controller.go:453] Built service openshift-service-ca-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 21:25:55.105686 6368 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"54fbe873-7e6d-475f-a0ad-8dd5f06d850d\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.276229 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.276299 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.276314 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.276334 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.276346 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:57Z","lastTransitionTime":"2025-09-29T21:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.278634 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-d5gdh"] Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.279496 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.279578 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.289131 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"q
uay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.318905 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.352482 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.371429 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.378859 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.378922 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.378938 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.378959 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.378972 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:57Z","lastTransitionTime":"2025-09-29T21:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.385144 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.392423 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24xkh\" (UniqueName: 
\"kubernetes.io/projected/b53f9593-39bf-43e0-b1de-09192d0167cd-kube-api-access-24xkh\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.392484 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.398095 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.412519 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.430350 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.446302 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.475838 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc
0f6d14f18ee540c114b4ba14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:55Z\\\",\\\"message\\\":\\\"troller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z]\\\\nI0929 21:25:55.105695 6368 services_controller.go:453] Built service openshift-service-ca-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 21:25:55.105686 6368 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"54fbe873-7e6d-475f-a0ad-8dd5f06d850d\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.481107 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.481156 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.481173 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.481195 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.481213 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:57Z","lastTransitionTime":"2025-09-29T21:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.492504 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.493131 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24xkh\" (UniqueName: 
\"kubernetes.io/projected/b53f9593-39bf-43e0-b1de-09192d0167cd-kube-api-access-24xkh\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.493162 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.493302 4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.493363 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs podName:b53f9593-39bf-43e0-b1de-09192d0167cd nodeName:}" failed. No retries permitted until 2025-09-29 21:25:57.993345863 +0000 UTC m=+35.970458534 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs") pod "network-metrics-daemon-d5gdh" (UID: "b53f9593-39bf-43e0-b1de-09192d0167cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.514877 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24xkh\" (UniqueName: \"kubernetes.io/projected/b53f9593-39bf-43e0-b1de-09192d0167cd-kube-api-access-24xkh\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.522881 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.538340 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.551056 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.563455 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.574188 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.584187 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.584254 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.584270 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.584310 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.584323 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:57Z","lastTransitionTime":"2025-09-29T21:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.584446 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.606343 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.620473 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.651163 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.668726 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.682127 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09
-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:57Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.687615 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.687665 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.687676 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.687696 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.687707 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:57Z","lastTransitionTime":"2025-09-29T21:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.694680 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.695115 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:26:13.695075621 +0000 UTC m=+51.672188292 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.700640 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.700778 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.791428 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.791959 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.792105 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.792249 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.792426 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:57Z","lastTransitionTime":"2025-09-29T21:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.796248 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.796313 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.796359 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.796411 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.796539 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.796614 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-09-29 21:26:13.796591243 +0000 UTC m=+51.773703944 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.797064 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.797231 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.797374 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.797568 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 21:26:13.797536543 +0000 UTC m=+51.774649254 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.797120 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.797968 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.798092 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.797132 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.798318 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-09-29 21:26:13.798290135 +0000 UTC m=+51.775402856 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.798484 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 21:26:13.798464031 +0000 UTC m=+51.775576742 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.895714 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.896173 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.896509 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.896658 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.896788 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:57Z","lastTransitionTime":"2025-09-29T21:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:57 crc kubenswrapper[4911]: I0929 21:25:57.998834 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.999065 4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 21:25:57 crc kubenswrapper[4911]: E0929 21:25:57.999245 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs podName:b53f9593-39bf-43e0-b1de-09192d0167cd nodeName:}" failed. No retries permitted until 2025-09-29 21:25:58.999215488 +0000 UTC m=+36.976328189 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs") pod "network-metrics-daemon-d5gdh" (UID: "b53f9593-39bf-43e0-b1de-09192d0167cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.001472 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.001554 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.001574 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.001601 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.001618 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.067369 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" event={"ID":"b5b94596-b945-4a89-b362-ec649e8e7981","Type":"ContainerStarted","Data":"a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f"} Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.067442 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" event={"ID":"b5b94596-b945-4a89-b362-ec649e8e7981","Type":"ContainerStarted","Data":"5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8"} Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.095681 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.105353 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.105549 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc 
kubenswrapper[4911]: I0929 21:25:58.105573 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.105632 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.105658 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.113968 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 
29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.149670 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\
"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"r
eason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.173625 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.188199 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.206842 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.209197 4911 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.209344 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.209459 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.209558 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.209647 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.229244 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.249771 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.268859 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.292627 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc
0f6d14f18ee540c114b4ba14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:55Z\\\",\\\"message\\\":\\\"troller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z]\\\\nI0929 21:25:55.105695 6368 services_controller.go:453] Built service openshift-service-ca-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 21:25:55.105686 6368 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"54fbe873-7e6d-475f-a0ad-8dd5f06d850d\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.309590 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.312418 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.312593 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.312992 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.313298 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.313523 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.331139 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.350025 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.364529 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.382712 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.397528 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 
21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.416066 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.417065 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.417205 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.417323 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.417439 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.417522 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.526960 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.527046 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.527074 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.527115 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.527140 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.630428 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.630488 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.630507 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.630532 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.630551 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.650555 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.651506 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.651568 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.651608 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.651631 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: E0929 21:25:58.675406 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.681572 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.681639 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.681661 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.681689 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.681709 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: E0929 21:25:58.700942 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.701084 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.701203 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:25:58 crc kubenswrapper[4911]: E0929 21:25:58.701695 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:25:58 crc kubenswrapper[4911]: E0929 21:25:58.701915 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.701217 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:25:58 crc kubenswrapper[4911]: E0929 21:25:58.702089 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.707859 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.707909 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.707927 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.707952 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.707970 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: E0929 21:25:58.731040 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.736542 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.736639 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.736659 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.736686 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.736711 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: E0929 21:25:58.756218 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.762143 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.762240 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.762270 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.762305 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.762335 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: E0929 21:25:58.781026 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:58Z is after 2025-08-24T17:21:41Z" Sep 29 21:25:58 crc kubenswrapper[4911]: E0929 21:25:58.781582 4911 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.784522 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.784652 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.784742 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.784860 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.784953 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.888609 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.888990 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.889094 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.889169 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.889227 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.992079 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.992384 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.992447 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.992514 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:58 crc kubenswrapper[4911]: I0929 21:25:58.992582 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:58Z","lastTransitionTime":"2025-09-29T21:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.008489 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:25:59 crc kubenswrapper[4911]: E0929 21:25:59.008886 4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 21:25:59 crc kubenswrapper[4911]: E0929 21:25:59.009184 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs podName:b53f9593-39bf-43e0-b1de-09192d0167cd nodeName:}" failed. No retries permitted until 2025-09-29 21:26:01.009158252 +0000 UTC m=+38.986270923 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs") pod "network-metrics-daemon-d5gdh" (UID: "b53f9593-39bf-43e0-b1de-09192d0167cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.095538 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.095633 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.095663 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.095703 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.095734 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:59Z","lastTransitionTime":"2025-09-29T21:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.199770 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.199867 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.199886 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.199920 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.199938 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:59Z","lastTransitionTime":"2025-09-29T21:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.304006 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.304084 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.304107 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.304136 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.304154 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:59Z","lastTransitionTime":"2025-09-29T21:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.407773 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.408131 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.408197 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.408318 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.408388 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:59Z","lastTransitionTime":"2025-09-29T21:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.512448 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.512525 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.512546 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.512575 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.512594 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:59Z","lastTransitionTime":"2025-09-29T21:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.615161 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.615247 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.615267 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.615298 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.615318 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:59Z","lastTransitionTime":"2025-09-29T21:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.700553 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:25:59 crc kubenswrapper[4911]: E0929 21:25:59.701254 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.718125 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.718180 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.718197 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.718223 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.718240 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:59Z","lastTransitionTime":"2025-09-29T21:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.821025 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.821092 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.821110 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.821138 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.821157 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:59Z","lastTransitionTime":"2025-09-29T21:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.925049 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.925106 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.925117 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.925137 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:25:59 crc kubenswrapper[4911]: I0929 21:25:59.925149 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:25:59Z","lastTransitionTime":"2025-09-29T21:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.027892 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.027934 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.027944 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.027962 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.027972 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:00Z","lastTransitionTime":"2025-09-29T21:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.130116 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.130165 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.130177 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.130239 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.130253 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:00Z","lastTransitionTime":"2025-09-29T21:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.233727 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.233863 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.233876 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.233897 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.233908 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:00Z","lastTransitionTime":"2025-09-29T21:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.336921 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.336971 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.336980 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.336998 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.337010 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:00Z","lastTransitionTime":"2025-09-29T21:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.439459 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.439495 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.439504 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.439520 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.439532 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:00Z","lastTransitionTime":"2025-09-29T21:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.543229 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.543290 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.543302 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.543325 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.543339 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:00Z","lastTransitionTime":"2025-09-29T21:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.647523 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.647565 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.647575 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.647592 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.647602 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:00Z","lastTransitionTime":"2025-09-29T21:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.700295 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.700316 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:00 crc kubenswrapper[4911]: E0929 21:26:00.700440 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.700486 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:00 crc kubenswrapper[4911]: E0929 21:26:00.700612 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:00 crc kubenswrapper[4911]: E0929 21:26:00.700697 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.750527 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.750578 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.750590 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.750610 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.750622 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:00Z","lastTransitionTime":"2025-09-29T21:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.852894 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.852950 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.852960 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.852979 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.852989 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:00Z","lastTransitionTime":"2025-09-29T21:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.955894 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.955942 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.955953 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.955971 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:00 crc kubenswrapper[4911]: I0929 21:26:00.955984 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:00Z","lastTransitionTime":"2025-09-29T21:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.032698 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:01 crc kubenswrapper[4911]: E0929 21:26:01.032906 4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 21:26:01 crc kubenswrapper[4911]: E0929 21:26:01.032985 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs podName:b53f9593-39bf-43e0-b1de-09192d0167cd nodeName:}" failed. No retries permitted until 2025-09-29 21:26:05.032965252 +0000 UTC m=+43.010077923 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs") pod "network-metrics-daemon-d5gdh" (UID: "b53f9593-39bf-43e0-b1de-09192d0167cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.059195 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.059257 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.059269 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.059285 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.059294 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:01Z","lastTransitionTime":"2025-09-29T21:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.162169 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.162216 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.162225 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.162240 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.162250 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:01Z","lastTransitionTime":"2025-09-29T21:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.265084 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.265132 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.265141 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.265158 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.265169 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:01Z","lastTransitionTime":"2025-09-29T21:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.369075 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.369139 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.369149 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.369173 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.369185 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:01Z","lastTransitionTime":"2025-09-29T21:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.472922 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.472980 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.472994 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.473016 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.473031 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:01Z","lastTransitionTime":"2025-09-29T21:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.577242 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.577296 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.577306 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.577325 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.577338 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:01Z","lastTransitionTime":"2025-09-29T21:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.680870 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.680930 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.680943 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.680966 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.680982 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:01Z","lastTransitionTime":"2025-09-29T21:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.700505 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:01 crc kubenswrapper[4911]: E0929 21:26:01.700668 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.783925 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.784005 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.784029 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.784062 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.784089 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:01Z","lastTransitionTime":"2025-09-29T21:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.887958 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.888024 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.888043 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.888070 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.888093 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:01Z","lastTransitionTime":"2025-09-29T21:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.991003 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.991063 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.991087 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.991117 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:01 crc kubenswrapper[4911]: I0929 21:26:01.991139 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:01Z","lastTransitionTime":"2025-09-29T21:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.093552 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.093637 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.093657 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.093687 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.093710 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:02Z","lastTransitionTime":"2025-09-29T21:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.196619 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.196672 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.196680 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.197163 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.197187 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:02Z","lastTransitionTime":"2025-09-29T21:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.300899 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.300936 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.300945 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.300961 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.300971 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:02Z","lastTransitionTime":"2025-09-29T21:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.404668 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.404725 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.404737 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.404758 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.404771 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:02Z","lastTransitionTime":"2025-09-29T21:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.508654 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.508722 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.508740 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.508763 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.508782 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:02Z","lastTransitionTime":"2025-09-29T21:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.611164 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.611223 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.611235 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.611260 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.611276 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:02Z","lastTransitionTime":"2025-09-29T21:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.700813 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:02 crc kubenswrapper[4911]: E0929 21:26:02.700993 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.700991 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.701118 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:02 crc kubenswrapper[4911]: E0929 21:26:02.701496 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:02 crc kubenswrapper[4911]: E0929 21:26:02.701691 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.717596 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.717658 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.717684 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.717716 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.717741 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:02Z","lastTransitionTime":"2025-09-29T21:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.718108 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.733202 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.745921 4911 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.773017 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-2
9T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d
414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.788423 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.803047 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.821184 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.821258 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.821283 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.821331 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.821358 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:02Z","lastTransitionTime":"2025-09-29T21:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.826323 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280
cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.838385 4911 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.867011 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.881181 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.900941 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.918087 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.923083 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.923115 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.923127 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.923142 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.923155 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:02Z","lastTransitionTime":"2025-09-29T21:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.939695 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.961451 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:02 crc kubenswrapper[4911]: I0929 21:26:02.982589 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.000126 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\"
,\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.022588 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc
0f6d14f18ee540c114b4ba14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:55Z\\\",\\\"message\\\":\\\"troller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z]\\\\nI0929 21:25:55.105695 6368 services_controller.go:453] Built service openshift-service-ca-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 21:25:55.105686 6368 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"54fbe873-7e6d-475f-a0ad-8dd5f06d850d\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:03Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.025627 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.025676 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.025686 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.025702 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.025712 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:03Z","lastTransitionTime":"2025-09-29T21:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.128466 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.128499 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.128508 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.128525 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.128534 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:03Z","lastTransitionTime":"2025-09-29T21:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.231299 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.231341 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.231351 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.231369 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.231383 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:03Z","lastTransitionTime":"2025-09-29T21:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.334369 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.334415 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.334423 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.334442 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.334454 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:03Z","lastTransitionTime":"2025-09-29T21:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.437673 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.437721 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.437733 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.437751 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.437762 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:03Z","lastTransitionTime":"2025-09-29T21:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.539918 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.539992 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.540014 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.540038 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.540050 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:03Z","lastTransitionTime":"2025-09-29T21:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.642762 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.642886 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.642919 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.642954 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.642976 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:03Z","lastTransitionTime":"2025-09-29T21:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.700873 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:26:03 crc kubenswrapper[4911]: E0929 21:26:03.701065 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.746250 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.746329 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.746349 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.746380 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:03 crc kubenswrapper[4911]: I0929 21:26:03.746409 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:03Z","lastTransitionTime":"2025-09-29T21:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[identical node-status entries (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeat at 21:26:03.851, 21:26:03.954, 21:26:04.057, 21:26:04.160, 21:26:04.264, 21:26:04.368, 21:26:04.472, 21:26:04.575 and 21:26:04.680]
Sep 29 21:26:04 crc kubenswrapper[4911]: I0929 21:26:04.700249 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:26:04 crc kubenswrapper[4911]: I0929 21:26:04.700315 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:26:04 crc kubenswrapper[4911]: I0929 21:26:04.700283 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:26:04 crc kubenswrapper[4911]: E0929 21:26:04.700440 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd"
Sep 29 21:26:04 crc kubenswrapper[4911]: E0929 21:26:04.700505 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 21:26:04 crc kubenswrapper[4911]: E0929 21:26:04.700578 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[node-status entries repeat at 21:26:04.784 and 21:26:04.888]
[node-status entries repeat at 21:26:04.992]
Sep 29 21:26:05 crc kubenswrapper[4911]: I0929 21:26:05.084284 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:26:05 crc kubenswrapper[4911]: E0929 21:26:05.084571 4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 21:26:05 crc kubenswrapper[4911]: E0929 21:26:05.084690 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs podName:b53f9593-39bf-43e0-b1de-09192d0167cd nodeName:}" failed. No retries permitted until 2025-09-29 21:26:13.084665315 +0000 UTC m=+51.061777986 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs") pod "network-metrics-daemon-d5gdh" (UID: "b53f9593-39bf-43e0-b1de-09192d0167cd") : object "openshift-multus"/"metrics-daemon-secret" not registered
[node-status entries repeat at 21:26:05.095 and 21:26:05.198]
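The nestedpendingoperations.go:348 entry above schedules the next MountVolume attempt for 21:26:13, i.e. the failure time plus the reported durationBeforeRetry of 8s. An 8s wait is consistent with a doubling backoff seeded at 500ms (0.5s, 1s, 2s, 4s, 8s). The sketch below illustrates that policy only; the 500ms seed and the 2m2s cap are assumptions for illustration, not values read from this log:

```go
// Minimal sketch of the exponential backoff implied by the
// "(durationBeforeRetry 8s)" entry above: each failed attempt doubles
// the wait until a cap is hit. Seed and cap are assumed values.
package main

import (
	"fmt"
	"time"
)

func durationBeforeRetry(failures int, initial, max time.Duration) time.Duration {
	d := initial
	for i := 1; i < failures; i++ {
		d *= 2
		if d > max {
			return max
		}
	}
	return d
}

func main() {
	for n := 1; n <= 6; n++ {
		fmt.Printf("failure %d -> retry in %v\n",
			n, durationBeforeRetry(n, 500*time.Millisecond, 122*time.Second))
	}
	// failure 5 -> retry in 8s, matching the wait recorded in the log above.
}
```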
[node-status entries repeat at 21:26:05.311, 21:26:05.416, 21:26:05.520 and 21:26:05.623]
Sep 29 21:26:05 crc kubenswrapper[4911]: I0929 21:26:05.699995 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:26:05 crc kubenswrapper[4911]: E0929 21:26:05.700190 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[node-status entries repeat at 21:26:05.727, 21:26:05.831, 21:26:05.935, 21:26:06.038, 21:26:06.141, 21:26:06.244, 21:26:06.347, 21:26:06.451, 21:26:06.554 and 21:26:06.657]
Sep 29 21:26:06 crc kubenswrapper[4911]: I0929 21:26:06.701295 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:26:06 crc kubenswrapper[4911]: I0929 21:26:06.701473 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:26:06 crc kubenswrapper[4911]: E0929 21:26:06.701531 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 21:26:06 crc kubenswrapper[4911]: E0929 21:26:06.701727 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 21:26:06 crc kubenswrapper[4911]: I0929 21:26:06.701941 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:26:06 crc kubenswrapper[4911]: E0929 21:26:06.702162 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd"
[node-status entries repeat at 21:26:06.761, 21:26:06.864, 21:26:06.968, 21:26:07.071, 21:26:07.174, 21:26:07.277, 21:26:07.380, 21:26:07.483, 21:26:07.586 and 21:26:07.689]
Sep 29 21:26:07 crc kubenswrapper[4911]: I0929 21:26:07.700540 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:26:07 crc kubenswrapper[4911]: E0929 21:26:07.700696 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[node-status entries repeat at 21:26:07.792, 21:26:07.895, 21:26:07.998, 21:26:08.106, 21:26:08.208, 21:26:08.312, 21:26:08.414 and 21:26:08.516]
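Every NotReady heartbeat above cites the same gate: there is no CNI configuration file under /etc/kubernetes/cni/net.d/, so the runtime reports NetworkReady=false and the kubelet keeps the node NotReady. The standalone sketch below shows that file-presence check in isolation; it is illustrative only, not kubelet code, and the accepted extensions are an assumption:

```go
// Minimal sketch of the readiness gate reported above: the network plugin
// counts as ready once a CNI config file exists in the conf directory.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func cniConfigPresent(dir string) bool {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false
	}
	for _, e := range entries {
		// Assumed extension list for illustration.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true
		}
	}
	return false
}

func main() {
	dir := "/etc/kubernetes/cni/net.d/"
	if cniConfigPresent(dir) {
		fmt.Println("CNI config found in", dir)
	} else {
		fmt.Println("no CNI configuration file in", dir)
	}
}
```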
[node-status entries repeat at 21:26:08.619]
Sep 29 21:26:08 crc kubenswrapper[4911]: I0929 21:26:08.700669 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:26:08 crc kubenswrapper[4911]: I0929 21:26:08.700738 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:26:08 crc kubenswrapper[4911]: I0929 21:26:08.700849 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:26:08 crc kubenswrapper[4911]: E0929 21:26:08.700986 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 21:26:08 crc kubenswrapper[4911]: E0929 21:26:08.701149 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 21:26:08 crc kubenswrapper[4911]: E0929 21:26:08.701379 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd"
[node-status entries repeat at 21:26:08.723, 21:26:08.827, 21:26:08.931 and 21:26:09.035]
Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.131012 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.131079 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.131098 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.131123 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.131140 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:09 crc kubenswrapper[4911]: E0929 21:26:09.148421 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:09Z is after 
2025-08-24T17:21:41Z" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.153959 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.154017 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.154035 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.154062 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.154083 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:09 crc kubenswrapper[4911]: E0929 21:26:09.170597 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:09Z is after 
2025-08-24T17:21:41Z" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.176189 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.176246 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.176266 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.176291 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.176314 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:09 crc kubenswrapper[4911]: E0929 21:26:09.198246 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:09Z is after 
2025-08-24T17:21:41Z" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.204328 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.204370 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.204404 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.204427 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.204443 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:09 crc kubenswrapper[4911]: E0929 21:26:09.230596 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:09Z is after 
2025-08-24T17:21:41Z" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.236975 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.237066 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.237093 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.237128 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.237156 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:09 crc kubenswrapper[4911]: E0929 21:26:09.261286 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:09Z is after 
2025-08-24T17:21:41Z" Sep 29 21:26:09 crc kubenswrapper[4911]: E0929 21:26:09.261618 4911 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.265296 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.265362 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.265375 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.265394 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.265406 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.368980 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.369035 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.369045 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.369067 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.369081 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.472234 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.472282 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.472293 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.472315 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.472328 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.576162 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.576224 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.576243 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.576268 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.576288 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.684441 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.684512 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.684531 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.685077 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.685132 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.700999 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:09 crc kubenswrapper[4911]: E0929 21:26:09.701158 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.787925 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.787965 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.787974 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.787987 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.787997 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.891422 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.891497 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.891515 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.891542 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.891562 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.994883 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.994965 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.994989 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.995022 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:09 crc kubenswrapper[4911]: I0929 21:26:09.995053 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:09Z","lastTransitionTime":"2025-09-29T21:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.098563 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.098633 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.098658 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.098690 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.098714 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:10Z","lastTransitionTime":"2025-09-29T21:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.202219 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.202292 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.202307 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.202332 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.202396 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:10Z","lastTransitionTime":"2025-09-29T21:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.306003 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.306043 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.306054 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.306072 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.306085 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:10Z","lastTransitionTime":"2025-09-29T21:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.410186 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.410323 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.410339 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.410357 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.410391 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:10Z","lastTransitionTime":"2025-09-29T21:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.514751 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.514873 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.514901 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.514933 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.514956 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:10Z","lastTransitionTime":"2025-09-29T21:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.619486 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.619558 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.619582 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.619675 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.619722 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:10Z","lastTransitionTime":"2025-09-29T21:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.700945 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.701123 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:10 crc kubenswrapper[4911]: E0929 21:26:10.701357 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:10 crc kubenswrapper[4911]: E0929 21:26:10.701707 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.701880 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:10 crc kubenswrapper[4911]: E0929 21:26:10.702706 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.703405 4911 scope.go:117] "RemoveContainer" containerID="e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.724831 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.724938 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.724965 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.725035 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.725065 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:10Z","lastTransitionTime":"2025-09-29T21:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.828987 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.829358 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.829375 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.829400 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.829419 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:10Z","lastTransitionTime":"2025-09-29T21:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.931959 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.932026 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.932040 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.932063 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:10 crc kubenswrapper[4911]: I0929 21:26:10.932078 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:10Z","lastTransitionTime":"2025-09-29T21:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.035443 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.035501 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.035511 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.035534 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.035548 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:11Z","lastTransitionTime":"2025-09-29T21:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.123230 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/1.log" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.128053 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876"} Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.128904 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.138298 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.138431 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.138459 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.138531 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.138555 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:11Z","lastTransitionTime":"2025-09-29T21:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.150906 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.164752 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.178914 4911 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.214224 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-2
9T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d
414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.238895 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.240948 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.240992 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.241005 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.241025 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.241038 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:11Z","lastTransitionTime":"2025-09-29T21:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.254925 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.272978 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.284012 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.297866 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.318673 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54
239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.333184 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.343813 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.343853 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.343876 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.343893 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.343903 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:11Z","lastTransitionTime":"2025-09-29T21:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.347841 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.366045 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.384647 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.398129 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\"
,\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.417501 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990d
a83985a88e512b3079391876\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:55Z\\\",\\\"message\\\":\\\"troller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z]\\\\nI0929 21:25:55.105695 6368 services_controller.go:453] Built service openshift-service-ca-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 21:25:55.105686 6368 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"54fbe873-7e6d-475f-a0ad-8dd5f06d850d\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", 
UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:26:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.431155 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:11Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.446259 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.446314 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.446327 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.446349 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.446361 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:11Z","lastTransitionTime":"2025-09-29T21:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.549463 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.549526 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.549544 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.549566 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.549581 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:11Z","lastTransitionTime":"2025-09-29T21:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.653768 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.653863 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.653876 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.653896 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.653909 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:11Z","lastTransitionTime":"2025-09-29T21:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.700430 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:11 crc kubenswrapper[4911]: E0929 21:26:11.700577 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.756771 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.756887 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.756903 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.756928 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.756945 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:11Z","lastTransitionTime":"2025-09-29T21:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.860815 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.860861 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.860873 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.860888 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.860899 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:11Z","lastTransitionTime":"2025-09-29T21:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.964017 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.964127 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.964153 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.964190 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:11 crc kubenswrapper[4911]: I0929 21:26:11.964212 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:11Z","lastTransitionTime":"2025-09-29T21:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.067606 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.067667 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.067690 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.067720 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.067742 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:12Z","lastTransitionTime":"2025-09-29T21:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.134375 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/2.log" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.135436 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/1.log" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.139210 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876" exitCode=1 Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.139295 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876"} Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.139398 4911 scope.go:117] "RemoveContainer" containerID="e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.140538 4911 scope.go:117] "RemoveContainer" containerID="de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876" Sep 29 21:26:12 crc kubenswrapper[4911]: E0929 21:26:12.140885 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.165095 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.171624 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.171702 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.171731 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.171765 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.171827 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:12Z","lastTransitionTime":"2025-09-29T21:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.186410 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.202181 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 
21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.234258 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.251856 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.267225 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.274874 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.274950 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.274970 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.274997 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.275016 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:12Z","lastTransitionTime":"2025-09-29T21:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.293204 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280
cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.307766 4911 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.323842 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.346997 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.376132 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cer
t\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:55Z\\\",\\\"message\\\":\\\"troller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z]\\\\nI0929 21:25:55.105695 6368 services_controller.go:453] Built service openshift-service-ca-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 21:25:55.105686 6368 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"54fbe873-7e6d-475f-a0ad-8dd5f06d850d\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:11Z\\\",\\\"message\\\":\\\"troller\\\\nI0929 21:26:11.697630 6582 admin_network_policy_namespace.go:56] Finished syncing Namespace openshift-service-ca-operator Admin Network Policy controller: took 9.93µs\\\\nI0929 21:26:11.697571 6582 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:26:11.697666 6582 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 21:26:11.697693 6582 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:26:11.697699 6582 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:26:11.697816 6582 factory.go:656] Stopping watch factory\\\\nI0929 21:26:11.697792 6582 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 21:26:11.697812 6582 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 21:26:11.697832 6582 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:26:11.697843 6582 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:26:11.697940 6582 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 21:26:11.698057 6582 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 21:26:11.698113 6582 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:26:11.698146 6582 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 21:26:11.698263 6582 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.379577 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.379644 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.379666 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.379694 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.379712 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:12Z","lastTransitionTime":"2025-09-29T21:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.391756 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.415672 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.436378 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.454481 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.475590 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.482826 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.482885 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.483375 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.483428 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.483456 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:12Z","lastTransitionTime":"2025-09-29T21:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.496140 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.588084 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 
21:26:12.588172 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.588190 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.588217 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.588237 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:12Z","lastTransitionTime":"2025-09-29T21:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.691748 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.691858 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.691878 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.691906 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.691926 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:12Z","lastTransitionTime":"2025-09-29T21:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.700283 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.700385 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:12 crc kubenswrapper[4911]: E0929 21:26:12.700457 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.700405 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:12 crc kubenswrapper[4911]: E0929 21:26:12.700634 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:12 crc kubenswrapper[4911]: E0929 21:26:12.700977 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.717636 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true
,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.741136 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.760579 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.780606 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.794325 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.794382 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.794399 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.794424 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.794442 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:12Z","lastTransitionTime":"2025-09-29T21:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.804098 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.825328 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.859806 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa
953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e367c219f65b163fea8802c80801d60df0804adc0f6d14f18ee540c114b4ba14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:25:55Z\\\",\\\"message\\\":\\\"troller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:25:55Z is after 2025-08-24T17:21:41Z]\\\\nI0929 21:25:55.105695 6368 services_controller.go:453] Built service openshift-service-ca-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 21:25:55.105686 6368 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"54fbe873-7e6d-475f-a0ad-8dd5f06d850d\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:11Z\\\",\\\"message\\\":\\\"troller\\\\nI0929 21:26:11.697630 6582 admin_network_policy_namespace.go:56] Finished syncing Namespace openshift-service-ca-operator Admin Network Policy controller: took 9.93µs\\\\nI0929 21:26:11.697571 6582 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:26:11.697666 6582 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 21:26:11.697693 6582 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:26:11.697699 6582 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:26:11.697816 6582 factory.go:656] Stopping watch factory\\\\nI0929 21:26:11.697792 6582 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 21:26:11.697812 6582 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 21:26:11.697832 6582 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:26:11.697843 6582 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:26:11.697940 6582 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 21:26:11.698057 6582 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 21:26:11.698113 6582 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:26:11.698146 6582 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 21:26:11.698263 6582 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.877749 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.895705 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.905263 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.905348 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.905374 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.905414 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.905443 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:12Z","lastTransitionTime":"2025-09-29T21:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.912696 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.929307 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 
21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.956873 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.978609 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:12 crc kubenswrapper[4911]: I0929 21:26:12.991756 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:12Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.008969 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.009013 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.009025 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.009046 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.009060 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:13Z","lastTransitionTime":"2025-09-29T21:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.009075 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280
cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.019010 4911 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.032705 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.112494 4911 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.112597 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.112613 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.112634 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.112648 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:13Z","lastTransitionTime":"2025-09-29T21:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.148202 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/2.log" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.153145 4911 scope.go:117] "RemoveContainer" containerID="de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876" Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.153502 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.176004 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.180730 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.180945 4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.181143 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs podName:b53f9593-39bf-43e0-b1de-09192d0167cd nodeName:}" failed. No retries permitted until 2025-09-29 21:26:29.181120639 +0000 UTC m=+67.158233340 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs") pod "network-metrics-daemon-d5gdh" (UID: "b53f9593-39bf-43e0-b1de-09192d0167cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.200209 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.215637 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.215670 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.215680 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.215696 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.215708 4911 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:13Z","lastTransitionTime":"2025-09-29T21:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.220402 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.234735 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.250556 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.266456 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.284727 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.321142 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.321235 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.321258 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.321289 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.321313 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:13Z","lastTransitionTime":"2025-09-29T21:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.333488 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:11Z\\\",\\\"message\\\":\\\"troller\\\\nI0929 21:26:11.697630 6582 admin_network_policy_namespace.go:56] Finished syncing Namespace openshift-service-ca-operator Admin Network Policy controller: took 9.93µs\\\\nI0929 21:26:11.697571 6582 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:26:11.697666 6582 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 21:26:11.697693 6582 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:26:11.697699 6582 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:26:11.697816 6582 factory.go:656] Stopping watch factory\\\\nI0929 21:26:11.697792 6582 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 21:26:11.697812 6582 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 21:26:11.697832 6582 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:26:11.697843 6582 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:26:11.697940 6582 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 21:26:11.698057 6582 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 21:26:11.698113 6582 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:26:11.698146 6582 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 21:26:11.698263 6582 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.356633 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.374551 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.388929 4911 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.412141 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-2
9T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d
414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.423642 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.423686 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.423694 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.423710 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.423721 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:13Z","lastTransitionTime":"2025-09-29T21:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.431870 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.444918 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.459158 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.469991 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.481782 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:13Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.526729 4911 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.526783 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.526815 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.526833 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.526845 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:13Z","lastTransitionTime":"2025-09-29T21:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.630118 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.630185 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.630203 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.630230 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.630249 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:13Z","lastTransitionTime":"2025-09-29T21:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.700145 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.700299 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.733476 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.733534 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.733555 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.733595 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.733615 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:13Z","lastTransitionTime":"2025-09-29T21:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.789786 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.790149 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:26:45.790107496 +0000 UTC m=+83.767220207 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.836540 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.836614 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.836634 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.836661 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.836684 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:13Z","lastTransitionTime":"2025-09-29T21:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.892096 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.892208 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.892253 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.892294 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.892346 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.892463 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:26:45.892435104 +0000 UTC m=+83.869547815 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.892481 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.892576 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.892600 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:26:45.892570278 +0000 UTC m=+83.869682979 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.892601 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.892499 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.892635 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.892656 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.892682 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.892709 4911 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 21:26:45.892685631 +0000 UTC m=+83.869798332 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:26:13 crc kubenswrapper[4911]: E0929 21:26:13.892736 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 21:26:45.892722802 +0000 UTC m=+83.869835513 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.940234 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.940327 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.940353 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.940388 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:13 crc kubenswrapper[4911]: I0929 21:26:13.940414 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:13Z","lastTransitionTime":"2025-09-29T21:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.043681 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.043759 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.043829 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.043866 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.043891 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:14Z","lastTransitionTime":"2025-09-29T21:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.147503 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.147587 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.147609 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.147637 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.147657 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:14Z","lastTransitionTime":"2025-09-29T21:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.250578 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.250662 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.250687 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.250719 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.250741 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:14Z","lastTransitionTime":"2025-09-29T21:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.353308 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.353363 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.353379 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.353404 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.353423 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:14Z","lastTransitionTime":"2025-09-29T21:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.457459 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.457528 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.457546 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.457578 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.457601 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:14Z","lastTransitionTime":"2025-09-29T21:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.561170 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.561241 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.561262 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.561293 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.561315 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:14Z","lastTransitionTime":"2025-09-29T21:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.664559 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.664639 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.664657 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.664684 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.664703 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:14Z","lastTransitionTime":"2025-09-29T21:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.704331 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.704391 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:14 crc kubenswrapper[4911]: E0929 21:26:14.704647 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.704742 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:14 crc kubenswrapper[4911]: E0929 21:26:14.704921 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:14 crc kubenswrapper[4911]: E0929 21:26:14.705134 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.767594 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.767677 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.767701 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.767824 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.767855 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:14Z","lastTransitionTime":"2025-09-29T21:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.870613 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.870688 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.870705 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.870732 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.870750 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:14Z","lastTransitionTime":"2025-09-29T21:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.974355 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.974432 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.974458 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.974495 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:14 crc kubenswrapper[4911]: I0929 21:26:14.974521 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:14Z","lastTransitionTime":"2025-09-29T21:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.077432 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.077504 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.077529 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.077568 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.077595 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:15Z","lastTransitionTime":"2025-09-29T21:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.180410 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.180493 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.180519 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.180550 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.180576 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:15Z","lastTransitionTime":"2025-09-29T21:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.284345 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.284412 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.284433 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.284460 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.284485 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:15Z","lastTransitionTime":"2025-09-29T21:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.390782 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.390881 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.390901 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.390932 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.390952 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:15Z","lastTransitionTime":"2025-09-29T21:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.494608 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.494720 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.494735 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.494761 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.494775 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:15Z","lastTransitionTime":"2025-09-29T21:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.598464 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.598523 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.598536 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.598557 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.598569 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:15Z","lastTransitionTime":"2025-09-29T21:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.700030 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:15 crc kubenswrapper[4911]: E0929 21:26:15.700188 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.701535 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.701592 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.701609 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.701633 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.701652 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:15Z","lastTransitionTime":"2025-09-29T21:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.804174 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.804251 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.804277 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.804310 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.804338 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:15Z","lastTransitionTime":"2025-09-29T21:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.907787 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.908296 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.908494 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.908613 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:15 crc kubenswrapper[4911]: I0929 21:26:15.908755 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:15Z","lastTransitionTime":"2025-09-29T21:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.012445 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.013191 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.013442 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.013873 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.014349 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:16Z","lastTransitionTime":"2025-09-29T21:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.078179 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.095194 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.105658 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.118544 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.118624 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.118648 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.118680 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.118705 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:16Z","lastTransitionTime":"2025-09-29T21:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.131127 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.154592 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.188088 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa
953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:11Z\\\",\\\"message\\\":\\\"troller\\\\nI0929 21:26:11.697630 6582 admin_network_policy_namespace.go:56] Finished syncing Namespace openshift-service-ca-operator Admin Network Policy controller: took 9.93µs\\\\nI0929 21:26:11.697571 6582 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:26:11.697666 6582 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 21:26:11.697693 6582 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:26:11.697699 6582 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:26:11.697816 6582 factory.go:656] Stopping watch factory\\\\nI0929 21:26:11.697792 6582 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 21:26:11.697812 6582 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 21:26:11.697832 6582 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:26:11.697843 6582 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:26:11.697940 6582 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 21:26:11.698057 6582 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 21:26:11.698113 6582 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:26:11.698146 6582 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 21:26:11.698263 6582 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.205633 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.222059 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.222130 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.222158 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.222187 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.222206 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:16Z","lastTransitionTime":"2025-09-29T21:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.230139 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.251244 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.270905 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.290225 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.307375 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 
21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.325720 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.325862 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.325890 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.325920 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.325945 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:16Z","lastTransitionTime":"2025-09-29T21:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.326544 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.348230 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.364584 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.400171 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"
volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\
\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.422242 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.435850 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.435938 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.435958 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.435985 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.436015 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:16Z","lastTransitionTime":"2025-09-29T21:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.442363 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.465394 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:16Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.539483 4911 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.539558 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.539581 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.539611 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.539632 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:16Z","lastTransitionTime":"2025-09-29T21:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.642622 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.642698 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.642717 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.642757 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.642777 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:16Z","lastTransitionTime":"2025-09-29T21:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.700910 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.700971 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:16 crc kubenswrapper[4911]: E0929 21:26:16.701064 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.701192 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:16 crc kubenswrapper[4911]: E0929 21:26:16.701351 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:16 crc kubenswrapper[4911]: E0929 21:26:16.701604 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.745547 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.745606 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.745624 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.745652 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.745671 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:16Z","lastTransitionTime":"2025-09-29T21:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.848573 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.848637 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.848655 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.848682 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.848728 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:16Z","lastTransitionTime":"2025-09-29T21:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.952469 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.952580 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.952604 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.952635 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:16 crc kubenswrapper[4911]: I0929 21:26:16.952658 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:16Z","lastTransitionTime":"2025-09-29T21:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.057299 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.057360 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.057378 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.057501 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.057693 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:17Z","lastTransitionTime":"2025-09-29T21:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.161215 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.161292 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.161306 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.161325 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.161339 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:17Z","lastTransitionTime":"2025-09-29T21:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.264310 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.264416 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.264436 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.264462 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.264483 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:17Z","lastTransitionTime":"2025-09-29T21:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.368170 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.368280 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.368311 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.368348 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.368380 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:17Z","lastTransitionTime":"2025-09-29T21:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.471750 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.471861 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.471883 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.471909 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.471929 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:17Z","lastTransitionTime":"2025-09-29T21:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.574435 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.574482 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.574499 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.574521 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.574537 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:17Z","lastTransitionTime":"2025-09-29T21:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.677895 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.677975 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.678000 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.678033 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.678057 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:17Z","lastTransitionTime":"2025-09-29T21:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.700327 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:17 crc kubenswrapper[4911]: E0929 21:26:17.700519 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.781703 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.781769 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.781787 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.781849 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.781870 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:17Z","lastTransitionTime":"2025-09-29T21:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.885022 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.885095 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.885113 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.885141 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.885173 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:17Z","lastTransitionTime":"2025-09-29T21:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.988719 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.988824 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.988846 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.988878 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:17 crc kubenswrapper[4911]: I0929 21:26:17.988898 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:17Z","lastTransitionTime":"2025-09-29T21:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.093180 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.093252 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.093277 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.093305 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.093325 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:18Z","lastTransitionTime":"2025-09-29T21:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.197199 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.197271 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.197286 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.197308 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.197325 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:18Z","lastTransitionTime":"2025-09-29T21:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.301285 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.301361 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.301385 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.301418 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.301440 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:18Z","lastTransitionTime":"2025-09-29T21:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.404942 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.405002 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.405026 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.405053 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.405070 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:18Z","lastTransitionTime":"2025-09-29T21:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.508991 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.509062 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.509081 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.509107 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.509126 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:18Z","lastTransitionTime":"2025-09-29T21:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.612167 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.612227 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.612236 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.612251 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.612260 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:18Z","lastTransitionTime":"2025-09-29T21:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.700728 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.700859 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:18 crc kubenswrapper[4911]: E0929 21:26:18.700974 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:18 crc kubenswrapper[4911]: E0929 21:26:18.701174 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.701249 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:18 crc kubenswrapper[4911]: E0929 21:26:18.701486 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.715439 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.715483 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.715491 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.715506 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.715516 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:18Z","lastTransitionTime":"2025-09-29T21:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.819885 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.819985 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.820015 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.820052 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.820084 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:18Z","lastTransitionTime":"2025-09-29T21:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.923780 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.923901 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.923920 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.923951 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:18 crc kubenswrapper[4911]: I0929 21:26:18.923979 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:18Z","lastTransitionTime":"2025-09-29T21:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.028676 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.028720 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.028732 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.028750 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.028763 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.132788 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.132930 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.132956 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.132990 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.133014 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.236360 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.236416 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.236425 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.236441 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.236452 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.339115 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.339155 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.339164 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.339177 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.339189 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.411600 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.411763 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.411786 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.411849 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.411876 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: E0929 21:26:19.429625 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:19Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.435931 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.435979 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.435999 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.436022 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.436040 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: E0929 21:26:19.451149 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:19Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.456867 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.456930 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.456949 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.456973 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.456991 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: E0929 21:26:19.479535 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:19Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.486576 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.486681 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.486703 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.486730 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.486749 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: E0929 21:26:19.508155 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:19Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.512546 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.512570 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.512581 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.512596 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.512608 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: E0929 21:26:19.527567 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:19Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:19 crc kubenswrapper[4911]: E0929 21:26:19.527721 4911 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.529595 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.529621 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.529632 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.529647 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.529659 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.636034 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.636110 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.636132 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.636159 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.636180 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.701016 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:19 crc kubenswrapper[4911]: E0929 21:26:19.701220 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.739935 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.740001 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.740019 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.740046 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.740065 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.844468 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.844517 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.844528 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.844552 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.844564 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.947783 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.947903 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.947923 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.947988 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:19 crc kubenswrapper[4911]: I0929 21:26:19.948012 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:19Z","lastTransitionTime":"2025-09-29T21:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.051666 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.051711 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.051720 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.051739 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.051752 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:20Z","lastTransitionTime":"2025-09-29T21:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.155276 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.155335 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.155350 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.155374 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.155391 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:20Z","lastTransitionTime":"2025-09-29T21:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.258708 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.258770 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.258784 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.258832 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.258845 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:20Z","lastTransitionTime":"2025-09-29T21:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.361842 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.362332 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.363023 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.363514 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.363644 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:20Z","lastTransitionTime":"2025-09-29T21:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.469741 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.469809 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.469825 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.469844 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.469855 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:20Z","lastTransitionTime":"2025-09-29T21:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.572711 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.572850 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.572877 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.572910 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.572935 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:20Z","lastTransitionTime":"2025-09-29T21:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.675711 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.675758 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.675773 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.675808 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.675822 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:20Z","lastTransitionTime":"2025-09-29T21:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.700718 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.700853 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.700855 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:20 crc kubenswrapper[4911]: E0929 21:26:20.701040 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:20 crc kubenswrapper[4911]: E0929 21:26:20.701145 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:20 crc kubenswrapper[4911]: E0929 21:26:20.701437 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.778012 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.778054 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.778065 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.778083 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.778094 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:20Z","lastTransitionTime":"2025-09-29T21:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.881430 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.881970 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.881993 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.882012 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.882025 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:20Z","lastTransitionTime":"2025-09-29T21:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.985479 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.985530 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.985544 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.985567 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:20 crc kubenswrapper[4911]: I0929 21:26:20.985581 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:20Z","lastTransitionTime":"2025-09-29T21:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.088123 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.088166 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.088177 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.088194 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.088206 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:21Z","lastTransitionTime":"2025-09-29T21:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.191427 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.191804 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.191901 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.191977 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.192097 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:21Z","lastTransitionTime":"2025-09-29T21:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.294928 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.294969 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.294978 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.294995 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.295005 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:21Z","lastTransitionTime":"2025-09-29T21:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.399095 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.399338 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.399434 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.399506 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.399565 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:21Z","lastTransitionTime":"2025-09-29T21:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.502121 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.502168 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.502182 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.502204 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.502218 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:21Z","lastTransitionTime":"2025-09-29T21:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.604633 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.604704 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.604716 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.604734 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.604748 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:21Z","lastTransitionTime":"2025-09-29T21:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.700884 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:21 crc kubenswrapper[4911]: E0929 21:26:21.701060 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.707859 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.707897 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.707908 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.707929 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.707941 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:21Z","lastTransitionTime":"2025-09-29T21:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.811696 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.811761 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.811772 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.811814 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.811825 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:21Z","lastTransitionTime":"2025-09-29T21:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.915570 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.915623 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.915632 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.915689 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:21 crc kubenswrapper[4911]: I0929 21:26:21.915701 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:21Z","lastTransitionTime":"2025-09-29T21:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.019146 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.019204 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.019213 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.019232 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.019244 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:22Z","lastTransitionTime":"2025-09-29T21:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.122993 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.123037 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.123047 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.123069 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.123079 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:22Z","lastTransitionTime":"2025-09-29T21:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.225854 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.225952 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.225980 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.226017 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.226054 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:22Z","lastTransitionTime":"2025-09-29T21:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.329026 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.329093 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.329112 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.329143 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.329162 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:22Z","lastTransitionTime":"2025-09-29T21:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.433088 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.433422 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.433514 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.433587 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.433656 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:22Z","lastTransitionTime":"2025-09-29T21:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.535983 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.536098 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.536118 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.536141 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.536164 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:22Z","lastTransitionTime":"2025-09-29T21:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.643753 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.643878 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.643908 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.643953 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.643981 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:22Z","lastTransitionTime":"2025-09-29T21:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.700137 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:22 crc kubenswrapper[4911]: E0929 21:26:22.700371 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.700671 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.700881 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:22 crc kubenswrapper[4911]: E0929 21:26:22.701255 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:22 crc kubenswrapper[4911]: E0929 21:26:22.701385 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.736201 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"s
tartedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d
09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.747355 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.747422 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.747440 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.747462 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.747479 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:22Z","lastTransitionTime":"2025-09-29T21:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.763910 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.779722 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.805716 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.822010 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.836927 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.851726 4911 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.851858 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.851880 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.851950 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.851970 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:22Z","lastTransitionTime":"2025-09-29T21:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.855263 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.881931 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990d
a83985a88e512b3079391876\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:11Z\\\",\\\"message\\\":\\\"troller\\\\nI0929 21:26:11.697630 6582 admin_network_policy_namespace.go:56] Finished syncing Namespace openshift-service-ca-operator Admin Network Policy controller: took 9.93µs\\\\nI0929 21:26:11.697571 6582 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:26:11.697666 6582 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 21:26:11.697693 6582 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:26:11.697699 6582 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:26:11.697816 6582 factory.go:656] Stopping watch factory\\\\nI0929 21:26:11.697792 6582 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 21:26:11.697812 6582 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 21:26:11.697832 6582 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:26:11.697843 6582 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:26:11.697940 6582 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 21:26:11.698057 6582 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 21:26:11.698113 6582 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:26:11.698146 6582 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 21:26:11.698263 6582 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.897833 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.921123 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.941162 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.955917 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.955951 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.955961 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.955980 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.955990 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:22Z","lastTransitionTime":"2025-09-29T21:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.960308 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.976210 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:22 crc kubenswrapper[4911]: I0929 21:26:22.995257 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:22Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.014675 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a2ba6d0-d11e-4f12-b9d7-f9a5b97d306a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e660ab2714b6ff9ddefb3634d2ae48dabf0a144b5f9ba96d429654fde989f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4c0d97fe39092f5b2cf1e4575d8ea9238b60085270aec20f28727379f9a1a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aab1c186df02eac1c6a9a54cf66510d44e4c63bad2da3cdbe53923869cc01cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:23Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.030487 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:23Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.049484 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:23Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.059681 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.059737 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.059757 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.059783 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.059839 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:23Z","lastTransitionTime":"2025-09-29T21:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.076175 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:23Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.164155 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.164201 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.164218 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.164242 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.164259 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:23Z","lastTransitionTime":"2025-09-29T21:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.267099 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.267170 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.267193 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.267222 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.267241 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:23Z","lastTransitionTime":"2025-09-29T21:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.370475 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.370525 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.370536 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.370555 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.370567 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:23Z","lastTransitionTime":"2025-09-29T21:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.474246 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.474310 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.474332 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.474361 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.474380 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:23Z","lastTransitionTime":"2025-09-29T21:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.577636 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.577698 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.577708 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.577727 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.577740 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:23Z","lastTransitionTime":"2025-09-29T21:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.680562 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.680625 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.680643 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.680669 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.680689 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:23Z","lastTransitionTime":"2025-09-29T21:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.700969 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:26:23 crc kubenswrapper[4911]: E0929 21:26:23.701157 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.784184 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.784490 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.784565 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.784653 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.784741 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:23Z","lastTransitionTime":"2025-09-29T21:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.887863 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.887921 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.887943 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.887968 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.887986 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:23Z","lastTransitionTime":"2025-09-29T21:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.991624 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.992273 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.992380 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.992483 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:23 crc kubenswrapper[4911]: I0929 21:26:23.992556 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:23Z","lastTransitionTime":"2025-09-29T21:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.095483 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.095552 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.095573 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.095600 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.095619 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:24Z","lastTransitionTime":"2025-09-29T21:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.198319 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.198389 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.198405 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.198426 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.198437 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:24Z","lastTransitionTime":"2025-09-29T21:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.300881 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.300956 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.300970 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.300993 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.301006 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:24Z","lastTransitionTime":"2025-09-29T21:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.404600 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.404931 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.405058 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.405144 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.405218 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:24Z","lastTransitionTime":"2025-09-29T21:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.507948 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.508354 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.508491 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.508655 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.508769 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:24Z","lastTransitionTime":"2025-09-29T21:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.611366 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.611407 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.611419 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.611438 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.611452 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:24Z","lastTransitionTime":"2025-09-29T21:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.700402 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.700515 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.700573 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:26:24 crc kubenswrapper[4911]: E0929 21:26:24.700690 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 21:26:24 crc kubenswrapper[4911]: E0929 21:26:24.700987 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd"
Sep 29 21:26:24 crc kubenswrapper[4911]: E0929 21:26:24.701574 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.702052 4911 scope.go:117] "RemoveContainer" containerID="de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876"
Sep 29 21:26:24 crc kubenswrapper[4911]: E0929 21:26:24.702192 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.713828 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.713862 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.713871 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.713887 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.713898 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:24Z","lastTransitionTime":"2025-09-29T21:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.816812 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.816849 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.816859 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.816874 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.816884 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:24Z","lastTransitionTime":"2025-09-29T21:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.920268 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.920325 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.920336 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.920355 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:24 crc kubenswrapper[4911]: I0929 21:26:24.920367 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:24Z","lastTransitionTime":"2025-09-29T21:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.023690 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.023745 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.023759 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.023780 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.023818 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:25Z","lastTransitionTime":"2025-09-29T21:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.126289 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.126597 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.126776 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.126975 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.127144 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:25Z","lastTransitionTime":"2025-09-29T21:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.230429 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.230506 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.230525 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.230556 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.230585 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:25Z","lastTransitionTime":"2025-09-29T21:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.333562 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.333643 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.333665 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.333698 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.333720 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:25Z","lastTransitionTime":"2025-09-29T21:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.436895 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.436951 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.436961 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.436982 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.436996 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:25Z","lastTransitionTime":"2025-09-29T21:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.540489 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.540561 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.540582 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.540611 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.540646 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:25Z","lastTransitionTime":"2025-09-29T21:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.644617 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.644689 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.644713 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.644779 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.644898 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:25Z","lastTransitionTime":"2025-09-29T21:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.700489 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:26:25 crc kubenswrapper[4911]: E0929 21:26:25.700752 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.747107 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.747150 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.747160 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.747177 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.747190 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:25Z","lastTransitionTime":"2025-09-29T21:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.849301 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.849346 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.849357 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.849375 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.849384 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:25Z","lastTransitionTime":"2025-09-29T21:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.952457 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.952502 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.952512 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.952534 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:25 crc kubenswrapper[4911]: I0929 21:26:25.952543 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:25Z","lastTransitionTime":"2025-09-29T21:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.055109 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.055156 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.055168 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.055188 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.055202 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:26Z","lastTransitionTime":"2025-09-29T21:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.158456 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.158508 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.158522 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.158548 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.158564 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:26Z","lastTransitionTime":"2025-09-29T21:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.261052 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.261097 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.261109 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.261149 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.261160 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:26Z","lastTransitionTime":"2025-09-29T21:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.365499 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.365545 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.365557 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.365575 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.365586 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:26Z","lastTransitionTime":"2025-09-29T21:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.468893 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.469352 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.469718 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.470197 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.470526 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:26Z","lastTransitionTime":"2025-09-29T21:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.573760 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.574112 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.574214 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.574286 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.574357 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:26Z","lastTransitionTime":"2025-09-29T21:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.677457 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.677734 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.677824 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.677910 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.677976 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:26Z","lastTransitionTime":"2025-09-29T21:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.701137 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.701165 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.701607 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:26:26 crc kubenswrapper[4911]: E0929 21:26:26.701767 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 21:26:26 crc kubenswrapper[4911]: E0929 21:26:26.701912 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd"
Sep 29 21:26:26 crc kubenswrapper[4911]: E0929 21:26:26.702137 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.781502 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.781560 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.781576 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.781602 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.781624 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:26Z","lastTransitionTime":"2025-09-29T21:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.884070 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.884113 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.884125 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.884149 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.884162 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:26Z","lastTransitionTime":"2025-09-29T21:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.987169 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.987241 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.987261 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.987290 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:26 crc kubenswrapper[4911]: I0929 21:26:26.987311 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:26Z","lastTransitionTime":"2025-09-29T21:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.089988 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.090650 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.090728 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.090823 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.090893 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:27Z","lastTransitionTime":"2025-09-29T21:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.193251 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.193283 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.193294 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.193307 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.193316 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:27Z","lastTransitionTime":"2025-09-29T21:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.296211 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.296264 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.296281 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.296306 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.296323 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:27Z","lastTransitionTime":"2025-09-29T21:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.399597 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.399674 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.399717 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.399752 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.399776 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:27Z","lastTransitionTime":"2025-09-29T21:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.503145 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.503181 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.503190 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.503204 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.503215 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:27Z","lastTransitionTime":"2025-09-29T21:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.605817 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.606299 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.606389 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.606474 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.606556 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:27Z","lastTransitionTime":"2025-09-29T21:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.700569 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:27 crc kubenswrapper[4911]: E0929 21:26:27.700841 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.709656 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.709699 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.709710 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.709727 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.709738 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:27Z","lastTransitionTime":"2025-09-29T21:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.812688 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.812736 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.812750 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.812769 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.812780 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:27Z","lastTransitionTime":"2025-09-29T21:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.916372 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.916435 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.916449 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.916470 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:27 crc kubenswrapper[4911]: I0929 21:26:27.916483 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:27Z","lastTransitionTime":"2025-09-29T21:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.018621 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.018666 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.018678 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.018697 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.018715 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:28Z","lastTransitionTime":"2025-09-29T21:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.121623 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.121678 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.121689 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.121709 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.121722 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:28Z","lastTransitionTime":"2025-09-29T21:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.224823 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.224887 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.224904 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.224926 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.224943 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:28Z","lastTransitionTime":"2025-09-29T21:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.327822 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.327891 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.327904 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.327921 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.327933 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:28Z","lastTransitionTime":"2025-09-29T21:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.431669 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.431726 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.431735 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.431754 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.431764 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:28Z","lastTransitionTime":"2025-09-29T21:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.535409 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.535472 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.535494 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.535523 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.535545 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:28Z","lastTransitionTime":"2025-09-29T21:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.638274 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.638322 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.638337 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.638357 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.638370 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:28Z","lastTransitionTime":"2025-09-29T21:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.700183 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.700185 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.700329 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:28 crc kubenswrapper[4911]: E0929 21:26:28.700440 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:28 crc kubenswrapper[4911]: E0929 21:26:28.700548 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:28 crc kubenswrapper[4911]: E0929 21:26:28.700670 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.741058 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.741123 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.741135 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.741160 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.741169 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:28Z","lastTransitionTime":"2025-09-29T21:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.843771 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.843865 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.843885 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.843914 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.843935 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:28Z","lastTransitionTime":"2025-09-29T21:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.947200 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.947294 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.947362 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.947416 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:28 crc kubenswrapper[4911]: I0929 21:26:28.947446 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:28Z","lastTransitionTime":"2025-09-29T21:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.050908 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.050953 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.050970 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.050989 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.051003 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.154009 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.154046 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.154060 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.154077 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.154088 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.256927 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.256990 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.257011 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.257039 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.257059 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.274545 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:29 crc kubenswrapper[4911]: E0929 21:26:29.274666 4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 21:26:29 crc kubenswrapper[4911]: E0929 21:26:29.274735 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs podName:b53f9593-39bf-43e0-b1de-09192d0167cd nodeName:}" failed. No retries permitted until 2025-09-29 21:27:01.274715658 +0000 UTC m=+99.251828339 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs") pod "network-metrics-daemon-d5gdh" (UID: "b53f9593-39bf-43e0-b1de-09192d0167cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.360096 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.360168 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.360187 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.360212 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.360231 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.462697 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.462768 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.462787 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.462841 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.462863 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.565757 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.565871 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.565890 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.565917 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.565938 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.668777 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.668860 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.668872 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.668890 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.668900 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.700448 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:29 crc kubenswrapper[4911]: E0929 21:26:29.700622 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.743199 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.743247 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.743257 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.743276 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.743288 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: E0929 21:26:29.758748 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:29Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.769060 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.769152 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.769172 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.769230 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.769249 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: E0929 21:26:29.784599 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:29Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.790763 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.790844 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.790861 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.790884 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.790936 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: E0929 21:26:29.804580 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:29Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.810175 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.810246 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.810266 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.810293 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.810309 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: E0929 21:26:29.823429 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:29Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.828130 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.828254 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.828326 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.828391 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.828470 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: E0929 21:26:29.840034 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:29Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:29 crc kubenswrapper[4911]: E0929 21:26:29.840981 4911 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.843128 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.843165 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.843177 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.843194 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.843208 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.945817 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.945870 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.945880 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.945897 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:29 crc kubenswrapper[4911]: I0929 21:26:29.945910 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:29Z","lastTransitionTime":"2025-09-29T21:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.049424 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.049483 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.049503 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.049529 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.049547 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:30Z","lastTransitionTime":"2025-09-29T21:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.152441 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.152493 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.152510 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.152532 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.152550 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:30Z","lastTransitionTime":"2025-09-29T21:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.256432 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.256993 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.257033 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.257059 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.257112 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:30Z","lastTransitionTime":"2025-09-29T21:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.360531 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.361054 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.361202 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.361362 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.361528 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:30Z","lastTransitionTime":"2025-09-29T21:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.466261 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.466325 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.466337 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.466358 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.466370 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:30Z","lastTransitionTime":"2025-09-29T21:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.570192 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.570251 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.570267 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.570288 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.570300 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:30Z","lastTransitionTime":"2025-09-29T21:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.672752 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.672827 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.672840 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.672859 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.672875 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:30Z","lastTransitionTime":"2025-09-29T21:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.700502 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.700578 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.700518 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:30 crc kubenswrapper[4911]: E0929 21:26:30.700691 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:30 crc kubenswrapper[4911]: E0929 21:26:30.700825 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:30 crc kubenswrapper[4911]: E0929 21:26:30.700908 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.776144 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.776202 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.776216 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.776238 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.776254 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:30Z","lastTransitionTime":"2025-09-29T21:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.879182 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.879237 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.879247 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.879267 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:30 crc kubenswrapper[4911]: I0929 21:26:30.879277 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:30Z","lastTransitionTime":"2025-09-29T21:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.009689 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.009742 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.009756 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.009778 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.009813 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:31Z","lastTransitionTime":"2025-09-29T21:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.112076 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.112137 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.112154 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.112174 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.112187 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:31Z","lastTransitionTime":"2025-09-29T21:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.214662 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.214742 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.214759 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.214781 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.214820 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:31Z","lastTransitionTime":"2025-09-29T21:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.317587 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.317636 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.317645 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.317665 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.317678 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:31Z","lastTransitionTime":"2025-09-29T21:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.420135 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.420195 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.420206 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.420228 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.420242 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:31Z","lastTransitionTime":"2025-09-29T21:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.524672 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.524724 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.524738 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.524768 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.524781 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:31Z","lastTransitionTime":"2025-09-29T21:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.633935 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.634088 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.634132 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.634223 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.634534 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:31Z","lastTransitionTime":"2025-09-29T21:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.700662 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:31 crc kubenswrapper[4911]: E0929 21:26:31.700859 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.737670 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.737749 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.737758 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.737773 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.737808 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:31Z","lastTransitionTime":"2025-09-29T21:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.840728 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.840778 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.840807 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.840856 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.840903 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:31Z","lastTransitionTime":"2025-09-29T21:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.943741 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.943784 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.943815 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.943836 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:31 crc kubenswrapper[4911]: I0929 21:26:31.943848 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:31Z","lastTransitionTime":"2025-09-29T21:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.046585 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.046663 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.046677 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.046694 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.046705 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:32Z","lastTransitionTime":"2025-09-29T21:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.149009 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.149071 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.149083 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.149103 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.149119 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:32Z","lastTransitionTime":"2025-09-29T21:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.220746 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lrfbg_1179c900-e866-4c5a-bb06-6032cc03a075/kube-multus/0.log"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.220806 4911 generic.go:334] "Generic (PLEG): container finished" podID="1179c900-e866-4c5a-bb06-6032cc03a075" containerID="8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e" exitCode=1
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.220838 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lrfbg" event={"ID":"1179c900-e866-4c5a-bb06-6032cc03a075","Type":"ContainerDied","Data":"8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e"}
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.221157 4911 scope.go:117] "RemoveContainer" containerID="8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.246313 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"star
tedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f
71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.253433 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.253475 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.253486 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.253506 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.253519 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:32Z","lastTransitionTime":"2025-09-29T21:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:32Z","lastTransitionTime":"2025-09-29T21:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.267061 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.280452 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.296397 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.311666 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.325019 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.342578 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54
239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.357168 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.357890 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.357933 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.357944 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.357962 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.357974 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:32Z","lastTransitionTime":"2025-09-29T21:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.372530 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.386149 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.402750 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.421728 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"2025-09-29T21:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5\\\\n2025-09-29T21:25:46+00:00 [cnibincopy] Successfully moved files in 
/host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5 to /host/opt/cni/bin/\\\\n2025-09-29T21:25:47Z [verbose] multus-daemon started\\\\n2025-09-29T21:25:47Z [verbose] Readiness Indicator file check\\\\n2025-09-29T21:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.440155 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:11Z\\\",\\\"message\\\":\\\"troller\\\\nI0929 21:26:11.697630 6582 admin_network_policy_namespace.go:56] Finished syncing Namespace openshift-service-ca-operator Admin Network Policy controller: took 9.93µs\\\\nI0929 21:26:11.697571 6582 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:26:11.697666 6582 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 21:26:11.697693 6582 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:26:11.697699 6582 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:26:11.697816 6582 factory.go:656] Stopping watch factory\\\\nI0929 21:26:11.697792 6582 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 21:26:11.697812 6582 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 21:26:11.697832 6582 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:26:11.697843 6582 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:26:11.697940 6582 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 21:26:11.698057 6582 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 21:26:11.698113 6582 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:26:11.698146 6582 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 21:26:11.698263 6582 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.457403 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.462232 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.462266 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.462277 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.462295 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.462308 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:32Z","lastTransitionTime":"2025-09-29T21:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.475900 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a2ba6d0-d11e-4f12-b9d7-f9a5b97d306a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e660ab2714b6ff9ddefb3634d2ae48dabf0a144b5f9ba96d429654fde989f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4c0d97fe39092f5b2cf1e4575d8ea9238b60085270aec20f28727379f9a1a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aab1c186df02eac1c6a9a54cf66510d44e4c63bad2da3cdbe53923869cc01cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.490449 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.506867 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.519836 4911 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.565531 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.565573 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.565583 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.565600 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.565611 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:32Z","lastTransitionTime":"2025-09-29T21:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.668632 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.668679 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.668690 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.668711 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.668728 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:32Z","lastTransitionTime":"2025-09-29T21:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.704052 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:26:32 crc kubenswrapper[4911]: E0929 21:26:32.704212 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd"
Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.704463 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:32 crc kubenswrapper[4911]: E0929 21:26:32.704539 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.704699 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:32 crc kubenswrapper[4911]: E0929 21:26:32.704771 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.719153 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a2ba6d0-d11e-4f12-b9d7-f9a5b97d306a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e660ab2714b6ff9ddefb3634d2ae48dabf0a144b5f9ba96d429654fde989f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4c0d97fe39092f5b2cf1e4575d8ea9238b60085270aec20f28727379f9a1a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7d
bf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aab1c186df02eac1c6a9a54cf66510d44e4c63bad2da3cdbe53923869cc01cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.740405 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.763051 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.771715 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.771771 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.771782 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.771812 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.771823 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:32Z","lastTransitionTime":"2025-09-29T21:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.783101 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.820259 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.837079 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.850717 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.869814 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.875074 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.875117 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:32 crc 
kubenswrapper[4911]: I0929 21:26:32.875128 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.875207 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.875225 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:32Z","lastTransitionTime":"2025-09-29T21:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.886278 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 
29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.900333 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.916106 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.934924 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-
cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.951373 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.967571 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.978112 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.978174 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.978191 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.978213 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.978229 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:32Z","lastTransitionTime":"2025-09-29T21:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:32 crc kubenswrapper[4911]: I0929 21:26:32.988435 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:32Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.005258 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.022139 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"2025-09-29T21:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5\\\\n2025-09-29T21:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5 to /host/opt/cni/bin/\\\\n2025-09-29T21:25:47Z [verbose] multus-daemon started\\\\n2025-09-29T21:25:47Z [verbose] Readiness Indicator file check\\\\n2025-09-29T21:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.055754 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:11Z\\\",\\\"message\\\":\\\"troller\\\\nI0929 21:26:11.697630 6582 admin_network_policy_namespace.go:56] Finished syncing Namespace openshift-service-ca-operator Admin Network Policy controller: took 9.93µs\\\\nI0929 21:26:11.697571 6582 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:26:11.697666 6582 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 21:26:11.697693 6582 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:26:11.697699 6582 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:26:11.697816 6582 factory.go:656] Stopping watch factory\\\\nI0929 21:26:11.697792 6582 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 21:26:11.697812 6582 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 21:26:11.697832 6582 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:26:11.697843 6582 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:26:11.697940 6582 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 21:26:11.698057 6582 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 21:26:11.698113 6582 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:26:11.698146 6582 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 21:26:11.698263 6582 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.080907 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.080949 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.080960 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.080979 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.080992 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:33Z","lastTransitionTime":"2025-09-29T21:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.186675 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.186749 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.186773 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.186829 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.186850 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:33Z","lastTransitionTime":"2025-09-29T21:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.228598 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lrfbg_1179c900-e866-4c5a-bb06-6032cc03a075/kube-multus/0.log" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.228696 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lrfbg" event={"ID":"1179c900-e866-4c5a-bb06-6032cc03a075","Type":"ContainerStarted","Data":"bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd"} Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.251742 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a2ba6d0-d11e-4f12-b9d7-f9a5b97d306a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e660ab2714b6ff9ddefb3634d2ae48dabf0a144b5f9ba96d429654fde989f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4c0d97fe39092f5b2cf1e4575d8ea9238b60085270aec20f28727379f9a1a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aab1c186df02eac1c6a9a54cf66510d44e4c63bad2da3cdbe53923869cc01cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.272831 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.287416 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.291595 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:33 
crc kubenswrapper[4911]: I0929 21:26:33.291718 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.291815 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.291888 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.291966 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:33Z","lastTransitionTime":"2025-09-29T21:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.305218 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.339375 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c68
77441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\
\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.353338 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.366712 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.384841 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.394388 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.394430 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:33 crc 
kubenswrapper[4911]: I0929 21:26:33.394443 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.394462 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.394476 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:33Z","lastTransitionTime":"2025-09-29T21:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.398862 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 
29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.414836 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.430687 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.448479 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-
cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.464257 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.477718 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.493407 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.501486 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.501540 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.501552 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.501570 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.501583 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:33Z","lastTransitionTime":"2025-09-29T21:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.509485 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.525140 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"2025-09-29T21:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5\\\\n2025-09-29T21:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5 to /host/opt/cni/bin/\\\\n2025-09-29T21:25:47Z [verbose] multus-daemon started\\\\n2025-09-29T21:25:47Z [verbose] Readiness Indicator file check\\\\n2025-09-29T21:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.544302 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:11Z\\\",\\\"message\\\":\\\"troller\\\\nI0929 21:26:11.697630 6582 admin_network_policy_namespace.go:56] Finished syncing Namespace openshift-service-ca-operator Admin Network Policy controller: took 9.93µs\\\\nI0929 21:26:11.697571 6582 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:26:11.697666 6582 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 21:26:11.697693 6582 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:26:11.697699 6582 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:26:11.697816 6582 factory.go:656] Stopping watch factory\\\\nI0929 21:26:11.697792 6582 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 21:26:11.697812 6582 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 21:26:11.697832 6582 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:26:11.697843 6582 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:26:11.697940 6582 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 21:26:11.698057 6582 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 21:26:11.698113 6582 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:26:11.698146 6582 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 21:26:11.698263 6582 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:33Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.604484 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.604543 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.604559 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.604584 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.604601 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:33Z","lastTransitionTime":"2025-09-29T21:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.700571 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:33 crc kubenswrapper[4911]: E0929 21:26:33.701227 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.707408 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.707444 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.707456 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.707472 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.707482 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:33Z","lastTransitionTime":"2025-09-29T21:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.810024 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.810050 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.810060 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.810076 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.810088 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:33Z","lastTransitionTime":"2025-09-29T21:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.912724 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.912781 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.912812 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.912834 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:33 crc kubenswrapper[4911]: I0929 21:26:33.912849 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:33Z","lastTransitionTime":"2025-09-29T21:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.015844 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.015886 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.015898 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.015914 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.015927 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:34Z","lastTransitionTime":"2025-09-29T21:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.119835 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.119888 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.119898 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.119917 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.119932 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:34Z","lastTransitionTime":"2025-09-29T21:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.222893 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.222960 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.222972 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.222994 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.223008 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:34Z","lastTransitionTime":"2025-09-29T21:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.326298 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.326341 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.326353 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.326368 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.326385 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:34Z","lastTransitionTime":"2025-09-29T21:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.428893 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.428938 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.428948 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.428968 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.428991 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:34Z","lastTransitionTime":"2025-09-29T21:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.531459 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.531507 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.531526 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.531548 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.531570 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:34Z","lastTransitionTime":"2025-09-29T21:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.633344 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.633386 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.633403 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.633424 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.633469 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:34Z","lastTransitionTime":"2025-09-29T21:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.700708 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.700954 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.700933 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:34 crc kubenswrapper[4911]: E0929 21:26:34.701101 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:34 crc kubenswrapper[4911]: E0929 21:26:34.701201 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:34 crc kubenswrapper[4911]: E0929 21:26:34.701368 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.735121 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.735167 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.735177 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.735194 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.735204 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:34Z","lastTransitionTime":"2025-09-29T21:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.838091 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.838165 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.838188 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.838222 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.838246 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:34Z","lastTransitionTime":"2025-09-29T21:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.942807 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.942864 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.942875 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.942895 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:34 crc kubenswrapper[4911]: I0929 21:26:34.942908 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:34Z","lastTransitionTime":"2025-09-29T21:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.045886 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.045940 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.045952 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.045972 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.045985 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:35Z","lastTransitionTime":"2025-09-29T21:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.149310 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.149385 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.149399 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.149425 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.149440 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:35Z","lastTransitionTime":"2025-09-29T21:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.251664 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.251721 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.251736 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.251756 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.251772 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:35Z","lastTransitionTime":"2025-09-29T21:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.354036 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.354085 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.354105 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.354130 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.354145 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:35Z","lastTransitionTime":"2025-09-29T21:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.456452 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.456521 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.456541 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.456571 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.456590 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:35Z","lastTransitionTime":"2025-09-29T21:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.558847 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.558896 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.558906 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.558923 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.558936 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:35Z","lastTransitionTime":"2025-09-29T21:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.662463 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.662513 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.662526 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.662546 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.662561 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:35Z","lastTransitionTime":"2025-09-29T21:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.699949 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:35 crc kubenswrapper[4911]: E0929 21:26:35.700233 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.701729 4911 scope.go:117] "RemoveContainer" containerID="de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.765594 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.765678 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.765712 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.765779 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.765849 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:35Z","lastTransitionTime":"2025-09-29T21:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.869065 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.869112 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.869121 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.869141 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.869151 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:35Z","lastTransitionTime":"2025-09-29T21:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.972054 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.972121 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.972141 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.972168 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:35 crc kubenswrapper[4911]: I0929 21:26:35.972186 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:35Z","lastTransitionTime":"2025-09-29T21:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.075751 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.075868 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.075894 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.075925 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.075952 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:36Z","lastTransitionTime":"2025-09-29T21:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.178640 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.178684 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.178698 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.178716 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.178731 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:36Z","lastTransitionTime":"2025-09-29T21:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.240370 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/2.log" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.243991 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f"} Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.244559 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.271932 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.281240 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.281293 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.281306 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.281330 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.281342 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:36Z","lastTransitionTime":"2025-09-29T21:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.304336 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.322384 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.339530 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.355859 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.372975 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.383823 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.383884 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.383897 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.383926 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.383948 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:36Z","lastTransitionTime":"2025-09-29T21:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.390362 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"2025-09-29T21:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5\\\\n2025-09-29T21:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5 to /host/opt/cni/bin/\\\\n2025-09-29T21:25:47Z [verbose] multus-daemon started\\\\n2025-09-29T21:25:47Z [verbose] Readiness Indicator file check\\\\n2025-09-29T21:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.417128 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:11Z\\\",\\\"message\\\":\\\"troller\\\\nI0929 21:26:11.697630 6582 admin_network_policy_namespace.go:56] Finished syncing Namespace openshift-service-ca-operator Admin Network Policy controller: took 9.93µs\\\\nI0929 21:26:11.697571 6582 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:26:11.697666 6582 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 21:26:11.697693 6582 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:26:11.697699 6582 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:26:11.697816 6582 factory.go:656] Stopping watch factory\\\\nI0929 21:26:11.697792 6582 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 21:26:11.697812 6582 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 21:26:11.697832 6582 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:26:11.697843 6582 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:26:11.697940 6582 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 21:26:11.698057 6582 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 21:26:11.698113 6582 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:26:11.698146 6582 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 21:26:11.698263 6582 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.430597 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.444458 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a2ba6d0-d11e-4f12-b9d7-f9a5b97d306a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e660ab2714b6ff9ddefb3634d2ae48dabf0a144b5f9ba96d429654fde989f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4c0d97fe39092f5b2cf1e4575d8ea9238b60085270aec20f28727379f9a1a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aab1c186df02eac1c6a9a54cf66510d44e4c63bad2da3cdbe53923869cc01cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.461595 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.475508 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.486489 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:36 
crc kubenswrapper[4911]: I0929 21:26:36.486584 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.486654 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.486690 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.486748 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:36Z","lastTransitionTime":"2025-09-29T21:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.492334 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.516283 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c68
77441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\
\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.532970 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.546686 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.567275 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.583055 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:36Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.590464 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.590515 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.590526 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.590548 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.590558 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:36Z","lastTransitionTime":"2025-09-29T21:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.694646 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.694720 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.694741 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.694813 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.694834 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:36Z","lastTransitionTime":"2025-09-29T21:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.700960 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.701028 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.700970 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:36 crc kubenswrapper[4911]: E0929 21:26:36.701205 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:36 crc kubenswrapper[4911]: E0929 21:26:36.701478 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:36 crc kubenswrapper[4911]: E0929 21:26:36.701626 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.798671 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.798740 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.798755 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.798779 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.798819 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:36Z","lastTransitionTime":"2025-09-29T21:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.902208 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.902257 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.902271 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.902290 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:36 crc kubenswrapper[4911]: I0929 21:26:36.902303 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:36Z","lastTransitionTime":"2025-09-29T21:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.006191 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.006273 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.006303 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.006333 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.006352 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:37Z","lastTransitionTime":"2025-09-29T21:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.109713 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.109778 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.109835 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.109875 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.109905 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:37Z","lastTransitionTime":"2025-09-29T21:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.213992 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.214070 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.214089 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.214117 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.214139 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:37Z","lastTransitionTime":"2025-09-29T21:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.252244 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/3.log" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.253597 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/2.log" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.259021 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f" exitCode=1 Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.259090 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f"} Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.259185 4911 scope.go:117] "RemoveContainer" containerID="de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.261038 4911 scope.go:117] "RemoveContainer" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f" Sep 29 21:26:37 crc kubenswrapper[4911]: E0929 21:26:37.261766 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.280415 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a2ba6d0-d11e-4f12-b9d7-f9a5b97d306a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e660ab2714b6ff9ddefb3634d2ae48dabf0a144b5f9ba96d429654fde989f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4c0d97fe39092f5b2cf1e4575d8ea9238b60085270aec20f28727379f9a1a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aab1c186df02eac1c6a9a54cf66510d44e4c63bad2da3cdbe53923869cc01cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.302151 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.319169 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.319257 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.319281 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.319315 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.319335 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:37Z","lastTransitionTime":"2025-09-29T21:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.325442 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.347081 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 
21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.363770 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.380907 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.403017 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.419605 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.422604 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.422658 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.422667 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.422685 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.422697 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:37Z","lastTransitionTime":"2025-09-29T21:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.456483 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.476323 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.494054 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb40
8d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.513185 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.526835 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.526909 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.526922 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.526951 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.526968 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:37Z","lastTransitionTime":"2025-09-29T21:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.533515 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.557978 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.579088 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"2025-09-29T21:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5\\\\n2025-09-29T21:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5 to /host/opt/cni/bin/\\\\n2025-09-29T21:25:47Z [verbose] multus-daemon started\\\\n2025-09-29T21:25:47Z [verbose] Readiness Indicator file check\\\\n2025-09-29T21:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.613222 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de700bedd50f64841f1af98c4ba1a6b209e5990da83985a88e512b3079391876\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:11Z\\\",\\\"message\\\":\\\"troller\\\\nI0929 21:26:11.697630 6582 admin_network_policy_namespace.go:56] Finished syncing Namespace openshift-service-ca-operator Admin Network Policy controller: took 9.93µs\\\\nI0929 21:26:11.697571 6582 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 21:26:11.697666 6582 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 21:26:11.697693 6582 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 21:26:11.697699 6582 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 21:26:11.697816 6582 factory.go:656] Stopping watch factory\\\\nI0929 21:26:11.697792 6582 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 21:26:11.697812 6582 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 21:26:11.697832 6582 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 21:26:11.697843 6582 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 21:26:11.697940 6582 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 21:26:11.698057 6582 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 21:26:11.698113 6582 ovnkube.go:599] Stopped ovnkube\\\\nI0929 21:26:11.698146 6582 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 21:26:11.698263 6582 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:36Z\\\",\\\"message\\\":\\\"36.673520 6929 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-d5gdh] creating logical port openshift-multus_network-metrics-daemon-d5gdh for pod on switch crc\\\\nI0929 21:26:36.673566 6929 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nI0929 21:26:36.673209 6929 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0929 21:26:36.673611 6929 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI0929 21:26:36.673612 6929 services_controller.go:360] Finished syncing service cluster-version-operator on namespace openshift-cluster-version for network=default : 
7.002132ms\\\\nF0929 21:26:36.673632 6929 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node net\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"moun
tPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.630005 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.630082 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.630100 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.630129 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.630150 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:37Z","lastTransitionTime":"2025-09-29T21:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.633207 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.656413 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:37Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.700336 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:37 crc kubenswrapper[4911]: E0929 21:26:37.700604 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.734462 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.734547 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.734561 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.734586 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.734600 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:37Z","lastTransitionTime":"2025-09-29T21:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.839138 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.839244 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.839277 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.839314 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.839338 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:37Z","lastTransitionTime":"2025-09-29T21:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.943336 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.943409 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.943434 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.943469 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:37 crc kubenswrapper[4911]: I0929 21:26:37.943493 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:37Z","lastTransitionTime":"2025-09-29T21:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.046391 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.046421 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.046433 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.046450 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.046461 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:38Z","lastTransitionTime":"2025-09-29T21:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.150031 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.150075 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.150088 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.150105 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.150115 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:38Z","lastTransitionTime":"2025-09-29T21:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.252835 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.252876 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.252889 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.252904 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.252917 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:38Z","lastTransitionTime":"2025-09-29T21:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.264438 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/3.log" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.269254 4911 scope.go:117] "RemoveContainer" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f" Sep 29 21:26:38 crc kubenswrapper[4911]: E0929 21:26:38.269425 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.288122 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.306879 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.326493 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.345227 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"2025-09-29T21:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5\\\\n2025-09-29T21:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5 to /host/opt/cni/bin/\\\\n2025-09-29T21:25:47Z [verbose] multus-daemon started\\\\n2025-09-29T21:25:47Z [verbose] Readiness Indicator file check\\\\n2025-09-29T21:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.355725 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.356122 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.356276 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.356424 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.356567 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:38Z","lastTransitionTime":"2025-09-29T21:26:38Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.364604 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:36Z\\\",\\\"message\\\":\\\"36.673520 6929 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-d5gdh] creating logical port openshift-multus_network-metrics-daemon-d5gdh for pod on switch crc\\\\nI0929 21:26:36.673566 6929 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nI0929 21:26:36.673209 6929 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0929 21:26:36.673611 6929 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI0929 21:26:36.673612 6929 services_controller.go:360] Finished syncing service cluster-version-operator on namespace openshift-cluster-version for network=default : 7.002132ms\\\\nF0929 21:26:36.673632 6929 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node 
net\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursive
ReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.376653 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.391876 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.402330 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.412283 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.424506 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.436619 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podI
P\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.449216 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a2ba6d0-d11e-4f12-b9d7-f9a5b97d306a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e660ab2714b6ff9ddefb3634d2ae48dabf0a144b5f9ba96d429654fde989f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4c0d97fe39092f5b2cf1e4575d8ea9238b60085270aec20f28727379f9a1a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aab1c186df02eac1c6a9a54cf66510d44e4c63bad2da3cdbe53923869cc01cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sh
a256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.459651 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.459694 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.459704 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.459721 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.459734 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:38Z","lastTransitionTime":"2025-09-29T21:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.462288 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z"
Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.475749 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.484219 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.501324 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.512213 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.522262 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09
-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:38Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.562531 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.563086 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.563310 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.564629 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.567029 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:38Z","lastTransitionTime":"2025-09-29T21:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.669497 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.669536 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.669547 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.669567 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.669576 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:38Z","lastTransitionTime":"2025-09-29T21:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.700625 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.700687 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.700625 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:38 crc kubenswrapper[4911]: E0929 21:26:38.700852 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:38 crc kubenswrapper[4911]: E0929 21:26:38.700935 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:38 crc kubenswrapper[4911]: E0929 21:26:38.701011 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.771720 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.771752 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.771761 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.771774 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.771785 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:38Z","lastTransitionTime":"2025-09-29T21:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.874849 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.874907 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.874920 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.874945 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.874958 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:38Z","lastTransitionTime":"2025-09-29T21:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.977716 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.977758 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.977770 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.977807 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:38 crc kubenswrapper[4911]: I0929 21:26:38.977818 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:38Z","lastTransitionTime":"2025-09-29T21:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.081128 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.081196 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.081214 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.081235 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.081247 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:39Z","lastTransitionTime":"2025-09-29T21:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.183745 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.183850 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.183872 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.183901 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.183921 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:39Z","lastTransitionTime":"2025-09-29T21:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.286967 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.287330 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.287407 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.287483 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.287545 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:39Z","lastTransitionTime":"2025-09-29T21:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.390099 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.390524 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.390655 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.390777 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.390894 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:39Z","lastTransitionTime":"2025-09-29T21:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.493466 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.493520 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.493533 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.493552 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.493565 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:39Z","lastTransitionTime":"2025-09-29T21:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.596452 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.596499 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.596511 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.596533 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.596544 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:39Z","lastTransitionTime":"2025-09-29T21:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.699524 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.699910 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.699977 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.700082 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.700162 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:39Z","lastTransitionTime":"2025-09-29T21:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.700196 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:39 crc kubenswrapper[4911]: E0929 21:26:39.700492 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.802662 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.803017 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.803134 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.803231 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.803325 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:39Z","lastTransitionTime":"2025-09-29T21:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.913693 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.913765 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.913774 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.913843 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:39 crc kubenswrapper[4911]: I0929 21:26:39.913858 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:39Z","lastTransitionTime":"2025-09-29T21:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.014008 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.014071 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.014090 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.014115 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.014133 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: E0929 21:26:40.033881 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:40Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.038998 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.039054 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.039072 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.039096 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.039114 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: E0929 21:26:40.060832 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:40Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.066192 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.066226 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
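
[editor's note] The recurring "Error updating node status, will retry" entries above all fail the same way: ordinary x509 time validation. The serving certificate behind the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 has a NotAfter of 2025-08-24T17:21:41Z while the node clock reads 2025-09-29T21:26:40Z, so the kubelet's HTTPS client rejects the connection during certificate verification and the status patch never reaches the webhook. A minimal Go sketch of the same check the Go TLS stack performs (the PEM path below is hypothetical; the log never shows where the webhook's certificate lives on disk):

    package main

    import (
    	"crypto/x509"
    	"encoding/pem"
    	"fmt"
    	"os"
    	"time"
    )

    func main() {
    	// Hypothetical path: the log only identifies the webhook endpoint
    	// (https://127.0.0.1:9743), not where its serving certificate is stored.
    	pemBytes, err := os.ReadFile("/tmp/webhook-serving.crt")
    	if err != nil {
    		fmt.Fprintln(os.Stderr, err)
    		os.Exit(1)
    	}
    	block, _ := pem.Decode(pemBytes)
    	if block == nil {
    		fmt.Fprintln(os.Stderr, "no PEM block in input")
    		os.Exit(1)
    	}
    	cert, err := x509.ParseCertificate(block.Bytes)
    	if err != nil {
    		fmt.Fprintln(os.Stderr, err)
    		os.Exit(1)
    	}
    	// crypto/x509 makes this same NotBefore/NotAfter comparison while
    	// verifying a peer certificate; when it fails, Go reports
    	// "x509: certificate has expired or is not yet valid", the exact
    	// error repeated throughout this log.
    	now := time.Now().UTC()
    	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
    		fmt.Printf("invalid: current time %s is after %s\n",
    			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
    		os.Exit(1)
    	}
    	fmt.Println("valid until", cert.NotAfter.UTC().Format(time.RFC3339))
    }

Because verification fails client-side before any request is sent, every retry fails identically; that is why the 21:26:40.060832 and 21:26:40.085236 attempts carry byte-for-byte the same payload and error as the 21:26:40.033881 attempt and are elided above.
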
event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.066236 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.066248 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.066258 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: E0929 21:26:40.085236 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:40Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.091097 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.091174 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.091195 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.091225 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.091246 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: E0929 21:26:40.133925 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:40Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.142706 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.142768 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.142783 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.142830 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.142856 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: E0929 21:26:40.180622 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:40Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:40 crc kubenswrapper[4911]: E0929 21:26:40.180765 4911 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.182921 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.182981 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.182997 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.183017 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.183030 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.286235 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.286316 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.286331 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.286357 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.286373 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.389448 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.389550 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.389570 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.389606 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.389627 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.493424 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.493483 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.493501 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.493523 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.493539 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.597416 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.597499 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.597522 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.597552 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.597577 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.700531 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.700651 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.700668 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:40 crc kubenswrapper[4911]: E0929 21:26:40.700881 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:40 crc kubenswrapper[4911]: E0929 21:26:40.701015 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.701107 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: E0929 21:26:40.701134 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.701147 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.701177 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.701205 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.701225 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.803951 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.804035 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.804052 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.804076 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.804097 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.907005 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.907057 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.907069 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.907091 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:40 crc kubenswrapper[4911]: I0929 21:26:40.907105 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:40Z","lastTransitionTime":"2025-09-29T21:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.010846 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.010900 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.010912 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.010932 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.010943 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:41Z","lastTransitionTime":"2025-09-29T21:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.114226 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.114317 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.114337 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.114371 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.114390 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:41Z","lastTransitionTime":"2025-09-29T21:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.217409 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.217514 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.217534 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.217568 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.217588 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:41Z","lastTransitionTime":"2025-09-29T21:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.319977 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.320198 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.320215 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.320238 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.320252 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:41Z","lastTransitionTime":"2025-09-29T21:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.423294 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.423391 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.423417 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.423456 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.423477 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:41Z","lastTransitionTime":"2025-09-29T21:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.527056 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.527103 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.527117 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.527140 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.527154 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:41Z","lastTransitionTime":"2025-09-29T21:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.634880 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.634957 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.634978 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.635056 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.635085 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:41Z","lastTransitionTime":"2025-09-29T21:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.700974 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:41 crc kubenswrapper[4911]: E0929 21:26:41.701219 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.739268 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.739331 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.739350 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.739382 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.739404 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:41Z","lastTransitionTime":"2025-09-29T21:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.843416 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.843489 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.843508 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.843538 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.843559 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:41Z","lastTransitionTime":"2025-09-29T21:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.946881 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.946948 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.947149 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.947177 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:41 crc kubenswrapper[4911]: I0929 21:26:41.947199 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:41Z","lastTransitionTime":"2025-09-29T21:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.051164 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.051773 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.051785 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.051839 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.051850 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:42Z","lastTransitionTime":"2025-09-29T21:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.155704 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.155838 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.155888 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.155922 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.155984 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:42Z","lastTransitionTime":"2025-09-29T21:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.260406 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.260480 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.260489 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.260510 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.260521 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:42Z","lastTransitionTime":"2025-09-29T21:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.370033 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.370106 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.370124 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.370167 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.370188 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:42Z","lastTransitionTime":"2025-09-29T21:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.473392 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.473512 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.473544 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.473575 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.473597 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:42Z","lastTransitionTime":"2025-09-29T21:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.577154 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.577271 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.577291 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.577320 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.577341 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:42Z","lastTransitionTime":"2025-09-29T21:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.680419 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.680493 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.680513 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.680543 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.680567 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:42Z","lastTransitionTime":"2025-09-29T21:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.701371 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.701433 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.701388 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:42 crc kubenswrapper[4911]: E0929 21:26:42.701610 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:42 crc kubenswrapper[4911]: E0929 21:26:42.701767 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:42 crc kubenswrapper[4911]: E0929 21:26:42.702149 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.718050 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.720786 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.744381 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://
e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.763736 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.783944 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.783987 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.783999 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.784013 4911 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.784023 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:42Z","lastTransitionTime":"2025-09-29T21:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.797304 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.816772 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.832864 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.849450 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.864204 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.880012 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.887655 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.887702 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.887758 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.887834 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.887865 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:42Z","lastTransitionTime":"2025-09-29T21:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.897383 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"2025-09-29T21:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5\\\\n2025-09-29T21:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5 to /host/opt/cni/bin/\\\\n2025-09-29T21:25:47Z [verbose] multus-daemon started\\\\n2025-09-29T21:25:47Z [verbose] Readiness Indicator file check\\\\n2025-09-29T21:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.923625 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:36Z\\\",\\\"message\\\":\\\"36.673520 6929 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-d5gdh] creating logical port openshift-multus_network-metrics-daemon-d5gdh for pod on switch crc\\\\nI0929 21:26:36.673566 6929 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nI0929 21:26:36.673209 6929 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0929 21:26:36.673611 6929 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI0929 21:26:36.673612 6929 services_controller.go:360] Finished syncing service cluster-version-operator on namespace openshift-cluster-version for network=default : 7.002132ms\\\\nF0929 21:26:36.673632 6929 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node net\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.940384 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.957788 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.976044 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.994214 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.994284 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.994304 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.994333 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.994353 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:42Z","lastTransitionTime":"2025-09-29T21:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:42 crc kubenswrapper[4911]: I0929 21:26:42.997976 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:42Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.015493 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.033373 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:43Z is after 2025-08-24T17:21:41Z" Sep 29 
21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.054369 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a2ba6d0-d11e-4f12-b9d7-f9a5b97d306a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e660ab2714b6ff9ddefb3634d2ae48dabf0a144b5f9ba96d429654fde989f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4c0d97fe39092f5b2cf1e4575d8ea9238b60085270aec20f28727379f9a1a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aab1c186df02eac1c6a9a54cf66510d44e4c63bad2da3cdbe53923869cc01cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:43Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.097204 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.097283 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.097303 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.097333 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.097351 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:43Z","lastTransitionTime":"2025-09-29T21:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.200837 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.200880 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.200893 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.200913 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.200926 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:43Z","lastTransitionTime":"2025-09-29T21:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.303030 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.303082 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.303094 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.303115 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.303129 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:43Z","lastTransitionTime":"2025-09-29T21:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.405994 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.406044 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.406054 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.406074 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.406085 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:43Z","lastTransitionTime":"2025-09-29T21:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.509624 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.509858 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.509888 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.509928 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.509953 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:43Z","lastTransitionTime":"2025-09-29T21:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.614395 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.614452 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.614470 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.614496 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.614515 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:43Z","lastTransitionTime":"2025-09-29T21:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.700587 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:43 crc kubenswrapper[4911]: E0929 21:26:43.700907 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.718520 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.718577 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.718598 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.718626 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.718645 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:43Z","lastTransitionTime":"2025-09-29T21:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.822463 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.822540 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.822559 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.822589 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.822610 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:43Z","lastTransitionTime":"2025-09-29T21:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.926373 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.926463 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.926489 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.926518 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:43 crc kubenswrapper[4911]: I0929 21:26:43.926536 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:43Z","lastTransitionTime":"2025-09-29T21:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.029177 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.029270 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.029290 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.029320 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.029340 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:44Z","lastTransitionTime":"2025-09-29T21:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.132744 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.132829 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.132847 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.132868 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.132885 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:44Z","lastTransitionTime":"2025-09-29T21:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.236290 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.236377 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.236400 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.236434 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.236459 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:44Z","lastTransitionTime":"2025-09-29T21:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.340437 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.340517 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.340535 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.340564 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.340583 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:44Z","lastTransitionTime":"2025-09-29T21:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.444736 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.444856 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.444877 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.444909 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.444931 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:44Z","lastTransitionTime":"2025-09-29T21:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.548556 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.548625 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.548638 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.548659 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.548670 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:44Z","lastTransitionTime":"2025-09-29T21:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.651988 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.652032 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.652043 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.652059 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.652070 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:44Z","lastTransitionTime":"2025-09-29T21:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.700394 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:44 crc kubenswrapper[4911]: E0929 21:26:44.700634 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.700994 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:44 crc kubenswrapper[4911]: E0929 21:26:44.701107 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.701450 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:44 crc kubenswrapper[4911]: E0929 21:26:44.701557 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.755062 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.755095 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.755106 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.755121 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:44 crc kubenswrapper[4911]: I0929 21:26:44.755132 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:44Z","lastTransitionTime":"2025-09-29T21:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 21:26:45 crc kubenswrapper[4911]: I0929 21:26:45.700350 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.700501 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 21:26:45 crc kubenswrapper[4911]: I0929 21:26:45.810496 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.810708 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.810662211 +0000 UTC m=+147.787774922 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:26:45 crc kubenswrapper[4911]: I0929 21:26:45.912243 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:26:45 crc kubenswrapper[4911]: I0929 21:26:45.912328 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:26:45 crc kubenswrapper[4911]: I0929 21:26:45.912400 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:26:45 crc kubenswrapper[4911]: I0929 21:26:45.912436 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.912573 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.912601 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.912682 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.912691 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.912707 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.912702 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.912699 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.912658882 +0000 UTC m=+147.889771593 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.912949 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.912914851 +0000 UTC m=+147.890027582 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.912781 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.913005 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.912998543 +0000 UTC m=+147.890111214 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.912722 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 21:26:45 crc kubenswrapper[4911]: E0929 21:26:45.913193 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.913129046 +0000 UTC m=+147.890241747 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 21:26:46 crc kubenswrapper[4911]: I0929 21:26:46.700871 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:26:46 crc kubenswrapper[4911]: I0929 21:26:46.701030 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:26:46 crc kubenswrapper[4911]: I0929 21:26:46.701128 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:26:46 crc kubenswrapper[4911]: E0929 21:26:46.701061 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 21:26:46 crc kubenswrapper[4911]: E0929 21:26:46.701305 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 21:26:46 crc kubenswrapper[4911]: E0929 21:26:46.701500 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd"
Sep 29 21:26:47 crc kubenswrapper[4911]: I0929 21:26:47.700388 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:26:47 crc kubenswrapper[4911]: E0929 21:26:47.700561 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Has your network provider started?"} Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.590395 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.591013 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.591032 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.591060 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.591082 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:48Z","lastTransitionTime":"2025-09-29T21:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.693922 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.694003 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.694029 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.694067 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.694096 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:48Z","lastTransitionTime":"2025-09-29T21:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.700402 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:48 crc kubenswrapper[4911]: E0929 21:26:48.700643 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.700662 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.700760 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:48 crc kubenswrapper[4911]: E0929 21:26:48.700969 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:48 crc kubenswrapper[4911]: E0929 21:26:48.701618 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.797527 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.797604 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.797623 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.797656 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.797675 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:48Z","lastTransitionTime":"2025-09-29T21:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.903739 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.903830 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.903844 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.903865 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:48 crc kubenswrapper[4911]: I0929 21:26:48.903885 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:48Z","lastTransitionTime":"2025-09-29T21:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.007474 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.007537 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.007551 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.007570 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.007581 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:49Z","lastTransitionTime":"2025-09-29T21:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.110235 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.110313 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.110334 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.110365 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.110385 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:49Z","lastTransitionTime":"2025-09-29T21:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.214284 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.214344 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.214364 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.214390 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.214410 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:49Z","lastTransitionTime":"2025-09-29T21:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.316992 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.317043 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.317054 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.317072 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.317087 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:49Z","lastTransitionTime":"2025-09-29T21:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.420848 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.420915 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.420929 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.420952 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.420966 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:49Z","lastTransitionTime":"2025-09-29T21:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.525172 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.525231 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.525254 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.525288 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.525316 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:49Z","lastTransitionTime":"2025-09-29T21:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.627979 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.628044 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.628056 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.628077 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.628089 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:49Z","lastTransitionTime":"2025-09-29T21:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.700769 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:49 crc kubenswrapper[4911]: E0929 21:26:49.700966 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.731600 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.731681 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.731700 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.731731 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.731750 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:49Z","lastTransitionTime":"2025-09-29T21:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.835425 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.835493 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.835507 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.835532 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.835547 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:49Z","lastTransitionTime":"2025-09-29T21:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.939142 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.939200 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.939211 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.939232 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:49 crc kubenswrapper[4911]: I0929 21:26:49.939245 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:49Z","lastTransitionTime":"2025-09-29T21:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.041685 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.041726 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.041741 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.041758 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.041768 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.144072 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.144129 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.144137 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.144153 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.144165 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.229937 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.229981 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.229992 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.230007 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.230022 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: E0929 21:26:50.247218 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.251202 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.251241 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.251251 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.251266 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.251278 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: E0929 21:26:50.266066 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.272528 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.272578 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
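The status patch itself is failing: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) is before the current time, so each patch attempt dies with the x509 error quoted above and the kubelet retries. A diagnostic Go sketch that dials the endpoint and checks the leaf certificate's validity window; InsecureSkipVerify is used only so the handshake completes and the dates can be inspected:

// certcheck.go - diagnostic sketch: reproduce the "certificate has expired
// or is not yet valid" verdict for the webhook endpoint named in the log.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	addr := "127.0.0.1:9743" // webhook endpoint from the log
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Printf("dial %s: %v\n", addr, err)
		return
	}
	defer conn.Close()

	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		fmt.Println("no peer certificates presented")
		return
	}
	leaf := state.PeerCertificates[0]
	now := time.Now().UTC()
	switch {
	case now.After(leaf.NotAfter):
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), leaf.NotAfter.Format(time.RFC3339))
	case now.Before(leaf.NotBefore):
		fmt.Printf("certificate not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), leaf.NotBefore.Format(time.RFC3339))
	default:
		fmt.Printf("certificate valid until %s\n", leaf.NotAfter.Format(time.RFC3339))
	}
}

Until that serving certificate is rotated, every node status patch is rejected by the webhook, so the retries below hit the identical failure.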
event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.272590 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.272609 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.272622 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: E0929 21:26:50.285314 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.289632 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.289685 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.289717 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.289751 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.289772 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: E0929 21:26:50.308217 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.313088 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.313136 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.313154 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.313179 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.313201 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: E0929 21:26:50.330945 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:50Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:50 crc kubenswrapper[4911]: E0929 21:26:50.331171 4911 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.333447 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.333496 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.333508 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.333527 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.333540 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.436764 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.436826 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.436837 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.436853 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.436868 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.540053 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.540098 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.540115 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.540139 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.540155 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.643696 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.643737 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.643748 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.643766 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.643776 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.700715 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.700749 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:50 crc kubenswrapper[4911]: E0929 21:26:50.700909 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.700827 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:50 crc kubenswrapper[4911]: E0929 21:26:50.701079 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:50 crc kubenswrapper[4911]: E0929 21:26:50.701154 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.746219 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.746256 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.746265 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.746279 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.746290 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.849269 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.849313 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.849323 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.849337 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.849348 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.952186 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.952227 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.952235 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.952250 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:50 crc kubenswrapper[4911]: I0929 21:26:50.952260 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:50Z","lastTransitionTime":"2025-09-29T21:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.055178 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.055217 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.055225 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.055239 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.055248 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:51Z","lastTransitionTime":"2025-09-29T21:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.157548 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.157608 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.157622 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.157639 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.157650 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:51Z","lastTransitionTime":"2025-09-29T21:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.259667 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.259695 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.259703 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.259716 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.259725 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:51Z","lastTransitionTime":"2025-09-29T21:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.361419 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.361456 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.361465 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.361479 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.361489 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:51Z","lastTransitionTime":"2025-09-29T21:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.463090 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.463132 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.463145 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.463164 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.463177 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:51Z","lastTransitionTime":"2025-09-29T21:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.565311 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.565342 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.565351 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.565366 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.565375 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:51Z","lastTransitionTime":"2025-09-29T21:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.668281 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.668315 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.668324 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.668338 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.668351 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:51Z","lastTransitionTime":"2025-09-29T21:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.700258 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:51 crc kubenswrapper[4911]: E0929 21:26:51.700501 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.771162 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.771194 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.771203 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.771216 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.771225 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:51Z","lastTransitionTime":"2025-09-29T21:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.873594 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.873650 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.873663 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.873719 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.873734 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:51Z","lastTransitionTime":"2025-09-29T21:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.976838 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.976920 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.976945 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.976981 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:51 crc kubenswrapper[4911]: I0929 21:26:51.977006 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:51Z","lastTransitionTime":"2025-09-29T21:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.080237 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.080292 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.080303 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.080324 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.080336 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:52Z","lastTransitionTime":"2025-09-29T21:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.184210 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.184298 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.184324 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.184354 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.184381 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:52Z","lastTransitionTime":"2025-09-29T21:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.289103 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.289177 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.289196 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.289228 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.289250 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:52Z","lastTransitionTime":"2025-09-29T21:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.392488 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.392533 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.392545 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.392562 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.392573 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:52Z","lastTransitionTime":"2025-09-29T21:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.496111 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.496170 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.496181 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.496204 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.496219 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:52Z","lastTransitionTime":"2025-09-29T21:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.598974 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.599013 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.599029 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.599048 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.599059 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:52Z","lastTransitionTime":"2025-09-29T21:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.700721 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.701011 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:52 crc kubenswrapper[4911]: E0929 21:26:52.701159 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.701189 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:52 crc kubenswrapper[4911]: E0929 21:26:52.701587 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:52 crc kubenswrapper[4911]: E0929 21:26:52.702447 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.702820 4911 scope.go:117] "RemoveContainer" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f" Sep 29 21:26:52 crc kubenswrapper[4911]: E0929 21:26:52.703108 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.703896 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.703966 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.703989 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.704025 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.704047 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:52Z","lastTransitionTime":"2025-09-29T21:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.717590 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c88d1c5-bcf0-4d96-9c77-9c310fd1b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://521e40cd673dac9022f408c58f537d4504f4077392fc1ea2ebf51f126f7ae6c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f95f11783829a277767850aa2a8af98d27b5c5f205c36392671b69f368bffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f95f11783829a277767850aa2a8af98d27b5c5f205c36392671b69f368bffa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.736497 4911 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a2ba6d0-d11e-4f12-b9d7-f9a5b97d306a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e660ab2714b6ff9ddefb3634d2ae48dabf0a144b5f9ba96d429654fde989f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4c0d97fe39092f5b2cf1e4575d8ea9238b60085270aec20f28727379f9a1a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aab1c186df02eac1c6a9a54cf66510d44e4c63bad2da3cdbe53923869cc01cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.758443 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.772878 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.790667 4911 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.807374 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.807483 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.807508 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.807541 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.807564 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:52Z","lastTransitionTime":"2025-09-29T21:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.817132 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a75ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.841496 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.856631 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.885545 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.899308 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.912749 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.912863 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.912886 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.912918 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.912938 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:52Z","lastTransitionTime":"2025-09-29T21:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.917146 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.934133 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.952749 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.969466 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:52 crc kubenswrapper[4911]: I0929 21:26:52.990385 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:52Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.012473 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.015864 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.015932 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.015951 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.015985 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.016006 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:53Z","lastTransitionTime":"2025-09-29T21:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.028948 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"2025-09-29T21:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5\\\\n2025-09-29T21:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5 to /host/opt/cni/bin/\\\\n2025-09-29T21:25:47Z [verbose] multus-daemon started\\\\n2025-09-29T21:25:47Z [verbose] Readiness Indicator file check\\\\n2025-09-29T21:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.051060 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:36Z\\\",\\\"message\\\":\\\"36.673520 6929 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-d5gdh] creating logical port openshift-multus_network-metrics-daemon-d5gdh for pod on switch crc\\\\nI0929 21:26:36.673566 6929 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nI0929 21:26:36.673209 6929 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0929 21:26:36.673611 6929 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI0929 21:26:36.673612 6929 services_controller.go:360] Finished syncing service cluster-version-operator on namespace openshift-cluster-version for network=default : 7.002132ms\\\\nF0929 21:26:36.673632 6929 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node net\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.067173 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:26:53Z is after 2025-08-24T17:21:41Z" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.127409 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.127489 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.127516 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.127551 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.127577 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:53Z","lastTransitionTime":"2025-09-29T21:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.230355 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.230406 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.230431 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.230460 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.230488 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:53Z","lastTransitionTime":"2025-09-29T21:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.332879 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.332956 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.332975 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.333007 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.333034 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:53Z","lastTransitionTime":"2025-09-29T21:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.436639 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.436734 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.436763 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.436833 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.436864 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:53Z","lastTransitionTime":"2025-09-29T21:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.539729 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.539912 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.539935 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.539965 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.539984 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:53Z","lastTransitionTime":"2025-09-29T21:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.642978 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.643051 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.643074 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.643102 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.643121 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:53Z","lastTransitionTime":"2025-09-29T21:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.700934 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:53 crc kubenswrapper[4911]: E0929 21:26:53.701075 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.746834 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.746922 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.746940 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.746970 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.746989 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:53Z","lastTransitionTime":"2025-09-29T21:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.850109 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.850247 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.850270 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.850302 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.850320 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:53Z","lastTransitionTime":"2025-09-29T21:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.954724 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.954780 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.954815 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.954837 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:53 crc kubenswrapper[4911]: I0929 21:26:53.954847 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:53Z","lastTransitionTime":"2025-09-29T21:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.057558 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.057595 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.057605 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.057622 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.057635 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:54Z","lastTransitionTime":"2025-09-29T21:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.160568 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.160612 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.160621 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.160636 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.160646 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:54Z","lastTransitionTime":"2025-09-29T21:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.263676 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.263754 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.263766 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.263784 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.263827 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:54Z","lastTransitionTime":"2025-09-29T21:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.366541 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.366586 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.366781 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.366815 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.366826 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:54Z","lastTransitionTime":"2025-09-29T21:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.469082 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.469127 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.469136 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.469153 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.469166 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:54Z","lastTransitionTime":"2025-09-29T21:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.571634 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.571675 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.571686 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.571702 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.571713 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:54Z","lastTransitionTime":"2025-09-29T21:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.674154 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.674204 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.674217 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.674237 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.674250 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:54Z","lastTransitionTime":"2025-09-29T21:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.700984 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:54 crc kubenswrapper[4911]: E0929 21:26:54.701145 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.701360 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:54 crc kubenswrapper[4911]: E0929 21:26:54.701427 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.701538 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:54 crc kubenswrapper[4911]: E0929 21:26:54.701626 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.778345 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.778384 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.778394 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.778409 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.778421 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:54Z","lastTransitionTime":"2025-09-29T21:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.881042 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.881103 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.881114 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.881131 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.881141 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:54Z","lastTransitionTime":"2025-09-29T21:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.983851 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.983897 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.983911 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.983931 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:54 crc kubenswrapper[4911]: I0929 21:26:54.983943 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:54Z","lastTransitionTime":"2025-09-29T21:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.086523 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.086585 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.086595 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.086609 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.086620 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:55Z","lastTransitionTime":"2025-09-29T21:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.189561 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.189614 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.189626 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.189642 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.189653 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:55Z","lastTransitionTime":"2025-09-29T21:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.292906 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.292980 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.293002 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.293030 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.293051 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:55Z","lastTransitionTime":"2025-09-29T21:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.401577 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.401628 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.401637 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.401657 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.401668 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:55Z","lastTransitionTime":"2025-09-29T21:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.505981 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.506122 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.506150 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.506182 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.506207 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:55Z","lastTransitionTime":"2025-09-29T21:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.609477 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.609563 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.609582 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.609615 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.609637 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:55Z","lastTransitionTime":"2025-09-29T21:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.701019 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:55 crc kubenswrapper[4911]: E0929 21:26:55.701222 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.712453 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.712688 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.712883 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.713022 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.713162 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:55Z","lastTransitionTime":"2025-09-29T21:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.815729 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.815847 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.815868 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.815898 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.815918 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:55Z","lastTransitionTime":"2025-09-29T21:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.918958 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.919008 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.919021 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.919044 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:55 crc kubenswrapper[4911]: I0929 21:26:55.919057 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:55Z","lastTransitionTime":"2025-09-29T21:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.022280 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.022320 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.022329 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.022342 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.022352 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:56Z","lastTransitionTime":"2025-09-29T21:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.125563 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.125827 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.125851 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.125878 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.125897 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:56Z","lastTransitionTime":"2025-09-29T21:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.229299 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.229395 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.229425 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.229462 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.229491 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:56Z","lastTransitionTime":"2025-09-29T21:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.333360 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.333423 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.333446 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.333479 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.333504 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:56Z","lastTransitionTime":"2025-09-29T21:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.437333 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.437390 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.437407 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.437434 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.437452 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:56Z","lastTransitionTime":"2025-09-29T21:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.541044 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.541123 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.541142 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.541174 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.541193 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:56Z","lastTransitionTime":"2025-09-29T21:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.644285 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.644368 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.644387 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.644415 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.644442 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:56Z","lastTransitionTime":"2025-09-29T21:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.701101 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.701146 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:56 crc kubenswrapper[4911]: E0929 21:26:56.701414 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.701462 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:56 crc kubenswrapper[4911]: E0929 21:26:56.701588 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:56 crc kubenswrapper[4911]: E0929 21:26:56.701916 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.748091 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.748163 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.748182 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.748211 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.748237 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:56Z","lastTransitionTime":"2025-09-29T21:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.851053 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.851129 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.851147 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.851178 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.851198 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:56Z","lastTransitionTime":"2025-09-29T21:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.954174 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.954224 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.954236 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.954252 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:56 crc kubenswrapper[4911]: I0929 21:26:56.954261 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:56Z","lastTransitionTime":"2025-09-29T21:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.057724 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.057786 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.057852 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.057875 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.057889 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:57Z","lastTransitionTime":"2025-09-29T21:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.161090 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.161133 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.161144 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.161159 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.161172 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:57Z","lastTransitionTime":"2025-09-29T21:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.265445 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.265508 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.265520 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.265538 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.265549 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:57Z","lastTransitionTime":"2025-09-29T21:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.368712 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.368780 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.368808 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.368832 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.368847 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:57Z","lastTransitionTime":"2025-09-29T21:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.472861 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.472944 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.472964 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.472996 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.473018 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:57Z","lastTransitionTime":"2025-09-29T21:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.576675 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.576748 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.576774 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.576838 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.576865 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:57Z","lastTransitionTime":"2025-09-29T21:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.680107 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.680175 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.680194 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.680221 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.680240 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:57Z","lastTransitionTime":"2025-09-29T21:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.700676 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:57 crc kubenswrapper[4911]: E0929 21:26:57.700884 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.789010 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.789102 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.789129 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.789167 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.789196 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:57Z","lastTransitionTime":"2025-09-29T21:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.891692 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.891763 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.891781 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.891837 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.891857 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:57Z","lastTransitionTime":"2025-09-29T21:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.994074 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.994135 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.994150 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.994169 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:57 crc kubenswrapper[4911]: I0929 21:26:57.994181 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:57Z","lastTransitionTime":"2025-09-29T21:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.097514 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.097586 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.097603 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.097632 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.097651 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:58Z","lastTransitionTime":"2025-09-29T21:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.201146 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.201197 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.201207 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.201225 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.201235 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:58Z","lastTransitionTime":"2025-09-29T21:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.304468 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.304521 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.304530 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.304546 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.304556 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:58Z","lastTransitionTime":"2025-09-29T21:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.407662 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.407734 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.407745 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.407763 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.407777 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:58Z","lastTransitionTime":"2025-09-29T21:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.511773 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.511847 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.511857 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.511873 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.511883 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:58Z","lastTransitionTime":"2025-09-29T21:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.615553 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.615632 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.615653 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.615681 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.615703 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:58Z","lastTransitionTime":"2025-09-29T21:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.700675 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.700729 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.700751 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:26:58 crc kubenswrapper[4911]: E0929 21:26:58.701065 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:26:58 crc kubenswrapper[4911]: E0929 21:26:58.701377 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:26:58 crc kubenswrapper[4911]: E0929 21:26:58.701437 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.718129 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.718177 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.718188 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.718204 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.718216 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:58Z","lastTransitionTime":"2025-09-29T21:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.821854 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.822040 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.822069 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.822163 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.822262 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:58Z","lastTransitionTime":"2025-09-29T21:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.925582 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.925665 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.925690 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.925734 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:58 crc kubenswrapper[4911]: I0929 21:26:58.925760 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:58Z","lastTransitionTime":"2025-09-29T21:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.029484 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.029562 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.029579 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.029606 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.029622 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:59Z","lastTransitionTime":"2025-09-29T21:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.132507 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.132556 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.132565 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.132583 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.132592 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:59Z","lastTransitionTime":"2025-09-29T21:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.235406 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.235465 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.235475 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.235490 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.235500 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:59Z","lastTransitionTime":"2025-09-29T21:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.338880 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.338961 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.338979 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.339008 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.339034 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:59Z","lastTransitionTime":"2025-09-29T21:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.442774 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.442910 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.442935 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.442966 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.442984 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:59Z","lastTransitionTime":"2025-09-29T21:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.546929 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.546981 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.546991 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.547008 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.547018 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:59Z","lastTransitionTime":"2025-09-29T21:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.650931 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.651006 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.651024 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.651054 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.651075 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:59Z","lastTransitionTime":"2025-09-29T21:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.700024 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:26:59 crc kubenswrapper[4911]: E0929 21:26:59.700159 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.755518 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.755589 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.755616 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.755652 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.755678 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:59Z","lastTransitionTime":"2025-09-29T21:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.859310 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.859372 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.859397 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.859429 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.859452 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:59Z","lastTransitionTime":"2025-09-29T21:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.962548 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.962611 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.962629 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.962664 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:26:59 crc kubenswrapper[4911]: I0929 21:26:59.962683 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:26:59Z","lastTransitionTime":"2025-09-29T21:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.066648 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.066691 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.066699 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.066717 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.066730 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:00Z","lastTransitionTime":"2025-09-29T21:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.170102 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.170151 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.170163 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.170181 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.170193 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:00Z","lastTransitionTime":"2025-09-29T21:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.272868 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.272933 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.272943 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.272979 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.272990 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:00Z","lastTransitionTime":"2025-09-29T21:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.376496 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.376554 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.376566 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.376609 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.376645 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:00Z","lastTransitionTime":"2025-09-29T21:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.479555 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.479643 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.479665 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.479697 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.479722 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:00Z","lastTransitionTime":"2025-09-29T21:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.585104 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.585155 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.585166 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.585185 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.585198 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:00Z","lastTransitionTime":"2025-09-29T21:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.629267 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.629313 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.629325 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.629340 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.629350 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:00Z","lastTransitionTime":"2025-09-29T21:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:00 crc kubenswrapper[4911]: E0929 21:27:00.647918 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:00Z is after 2025-08-24T17:21:41Z"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.652237 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.652271 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.652281 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.652296 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.652311 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:00Z","lastTransitionTime":"2025-09-29T21:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:00 crc kubenswrapper[4911]: E0929 21:27:00.669605 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:00Z is after 2025-08-24T17:21:41Z"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.674748 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.674818 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.674830 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.674846 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.674857 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:00Z","lastTransitionTime":"2025-09-29T21:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:00 crc kubenswrapper[4911]: E0929 21:27:00.688961 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T21:27:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7dded9b5-8ab5-45b2-be5a-4613b6e8208f\\\",\\\"systemUUID\\\":\\\"6cb362cf-0841-40fb-a840-f46642f78745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.694025 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.694098 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.694117 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.694142 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.694160 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:00Z","lastTransitionTime":"2025-09-29T21:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.700469 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.700472 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.700469 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:27:00 crc kubenswrapper[4911]: E0929 21:27:00.700598 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 21:27:00 crc kubenswrapper[4911]: E0929 21:27:00.700865 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 21:27:00 crc kubenswrapper[4911]: E0929 21:27:00.700942 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd"
Sep 29 21:27:00 crc kubenswrapper[4911]: E0929 21:27:00.711693 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [status patch payload identical to the previous attempt, elided] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:00Z is after 2025-08-24T17:21:41Z"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.716666 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.716732 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.716754 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.716777 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.716823 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:00Z","lastTransitionTime":"2025-09-29T21:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 21:27:00 crc kubenswrapper[4911]: E0929 21:27:00.735683 4911 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [status patch payload identical to the previous attempt, elided] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:00Z is after 2025-08-24T17:21:41Z"
Sep 29 21:27:00 crc kubenswrapper[4911]: E0929 21:27:00.735929 4911 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.737784 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.737863 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.737885 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.737909 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:27:00 crc kubenswrapper[4911]: I0929 21:27:00.737930 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:00Z","lastTransitionTime":"2025-09-29T21:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the same five-entry heartbeat group ("Recording event message for node" for NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, then "Node became not ready") repeats roughly every 100 ms from 21:27:00.841 through 21:27:02.599; the unique entries from that window follow]
Sep 29 21:27:01 crc kubenswrapper[4911]: I0929 21:27:01.295727 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:27:01 crc kubenswrapper[4911]: E0929 21:27:01.296039 4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 21:27:01 crc kubenswrapper[4911]: E0929 21:27:01.296183 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs podName:b53f9593-39bf-43e0-b1de-09192d0167cd nodeName:}" failed. No retries permitted until 2025-09-29 21:28:05.296147663 +0000 UTC m=+163.273260364 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs") pod "network-metrics-daemon-d5gdh" (UID: "b53f9593-39bf-43e0-b1de-09192d0167cd") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 21:27:01 crc kubenswrapper[4911]: I0929 21:27:01.700782 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:27:01 crc kubenswrapper[4911]: E0929 21:27:01.701037 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.700518 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:27:02 crc kubenswrapper[4911]: E0929 21:27:02.700677 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.700695 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:27:02 crc kubenswrapper[4911]: E0929 21:27:02.700773 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.700902 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:27:02 crc kubenswrapper[4911]: E0929 21:27:02.701226 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.704317 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.704366 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.704385 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.704410 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.704429 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:02Z","lastTransitionTime":"2025-09-29T21:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.719542 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.739031 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.755009 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4052e710e0a06bf38d26e6006248d090051284bc0eae7a68b17241ef8016909\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a7d7fad5efb67cc7ef796977f03cc9c0e69fcc4340ca28749fffcc13236ed8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.770457 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lrfbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1179c900-e866-4c5a-bb06-6032cc03a075\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:32Z\\\",\\\"message\\\":\\\"2025-09-29T21:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5\\\\n2025-09-29T21:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffacfa39-705c-4aac-940e-47d0cb57c1d5 to /host/opt/cni/bin/\\\\n2025-09-29T21:25:47Z [verbose] multus-daemon started\\\\n2025-09-29T21:25:47Z [verbose] Readiness Indicator file check\\\\n2025-09-29T21:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jwhfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lrfbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.793633 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e3aa70f-b0da-44c9-a850-96d4494b02fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T21:26:36Z\\\",\\\"message\\\":\\\"36.673520 6929 base_network_controller_pods.go:477] [default/openshift-multus/network-metrics-daemon-d5gdh] creating logical port openshift-multus_network-metrics-daemon-d5gdh for pod on switch crc\\\\nI0929 21:26:36.673566 6929 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nI0929 21:26:36.673209 6929 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0929 21:26:36.673611 6929 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI0929 21:26:36.673612 6929 services_controller.go:360] Finished syncing service cluster-version-operator on namespace openshift-cluster-version for network=default : 7.002132ms\\\\nF0929 21:26:36.673632 6929 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node net\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:26:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnzb7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9wxd8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z"
Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.807481 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.807603 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.807703 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.807836 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.807865 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:02Z","lastTransitionTime":"2025-09-29T21:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.810962 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b53f9593-39bf-43e0-b1de-09192d0167cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-24xkh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-d5gdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.826381 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f56832a-7fd7-428e-85ee-55c5dea7b67a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ff3cf4013e69405637fe1483d5eb8ab2d12a0ba222c8446d93445dcb94ac9b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b32578d3e75bb839ad134f88850ba1b26e57846431fac71b64c5ddbce802464\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4539e6f57be0c686d5ea6ccbf1537c7c93698f81d2fdcfe5a143ce1f014e09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c9f68393ebed4fd5fe219544b0585a743989f8c54239e19ec702bddf9c30f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9dfa222360c949632050bd1dc5453a14c0dfa0d4614282132ceea2297405443e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T21:25:36Z\\\",\\\"message\\\":\\\"W0929 21:25:25.867198 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 21:25:25.867655 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759181125 cert, and key in /tmp/serving-cert-2091738680/serving-signer.crt, /tmp/serving-cert-2091738680/serving-signer.key\\\\nI0929 21:25:26.230899 1 observer_polling.go:159] Starting file observer\\\\nW0929 21:25:26.235864 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 21:25:26.236118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 21:25:26.238856 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2091738680/tls.crt::/tmp/serving-cert-2091738680/tls.key\\\\\\\"\\\\nF0929 21:25:36.606155 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47cf5d1e65f887e84d3c0694d5038edf3451c4af14d08a29d48562d05e36ea26\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://dc5ca1944ac403367ef9077ee23f7abe0698186a16b9131a3656b168e2cfb09c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.845599 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8646497-f7ad-4b38-a69f-9a14345af1c8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d3bb437bdac371dd4e22f20f304a5381c128ac2b774c51bc4f748a1c7aa3834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://129d51f84c8798ed7b1078f34899d4231c7bf4e1a75212f694c6c027b8463b9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://290e37c77297fae7dcb3540911f6b2917e26daee3623c9106e34de394ccfa01f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7088f5a3752b8778de247eacb408d511197d1501e8e2811a98a9b713ca1fca04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.860507 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.875247 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a92f3bb864cba0c6615e8a07233e923c0d0681e83ad6722aa7c7043fb5966d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.888387 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5b94596-b945-4a89-b362-ec649e8e7981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5154aa6a1a3a9b1a5a58752607c01b955e28b7e896bf9b99b5cca2448077ebd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f8c9f36d4fa9ccfe9c4fdf2794b819a0de928056fc3c320b6c83f9c518258f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm7xf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podI
P\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:56Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cfbgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.901617 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c88d1c5-bcf0-4d96-9c77-9c310fd1b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://521e40cd673dac9022f408c58f537d4504f4077392fc1ea2ebf51f126f7ae6c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f95f11783829a277767850aa2a8af98d27b5c5f205c36392671b69f368bffa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f95f11783829a277767850aa2a8af98d27b5c5f205c36392671b69f368bffa1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025
-09-29T21:25:22Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.913147 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.913229 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.913256 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.913285 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.913306 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:02Z","lastTransitionTime":"2025-09-29T21:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.923768 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a2ba6d0-d11e-4f12-b9d7-f9a5b97d306a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af9e660ab2714b6ff9ddefb3634d2ae48dabf0a144b5f9ba96d429654fde989f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4c0d97fe39092f5b2cf1e4575d8ea9238b60085270aec20f28727379f9a1a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aab1c186df02eac1c6a9a54cf66510d44e4c63bad2da3cdbe53923869cc01cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e8a2aec2b50058d3e0f16b0756724feebf54c180e50dbbec1dc319c31234bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.942925 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dnhjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fe62a53-f0b8-4c66-8adf-3b9f8bef4195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b11f03867ce6c678d04431a19903c21acd7665f0cca4c7b0344e9d1cf0e17e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64hpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:42Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-dnhjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.960700 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bp485" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"364060da-3bac-4f3e-b8b8-a64b0441cb5e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dba9267c23bc8990d70b320a688e96796cf75aca99f5a3ce12fa82327cb2d3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962008bbec5a6556b4ab76cf151a68e971bde07fdc5fe560c561f42cf597d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a6af280cf9196a9ed2a1798f7f4e7d6504ef6707c1d265fb9b232c02d70c205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4783145a17a1233241a3766d9b259d16ed82b2076ff6790c1a368d713271cd91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://
e0bb5fec15ae4eb585d8ba1c0bedabe62a4094ca50044a09b44439dc555f064d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153df9063b1a1e897e8d02e139d641d00d84dce42004be4c69dd55d44e7436b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9acf5d7a17bf6bb6d1ad415d830594b8f6b986fbb7c58706b19d28546dfae6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ng4nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bp485\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:02 crc kubenswrapper[4911]: I0929 21:27:02.975922 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dz6zq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c16af8d-e647-4820-b96f-298cce113ab1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e64c28e47bb861649d58b34f5e3c369cfaf8c808435b32a13ebfc2c02b7b6db9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbgg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dz6zq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.008001 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85fccbbf-637b-4195-89ba-828db0f10fb3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11c497bb84920c85649975dc5a3ba883723017d5503200e47c08427f45b9edfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a594d1677deac968c88fac4a64d4b047cf02ad8d6db74ee2ebe170d4135f588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2db0acd86c2dfcd0303626aa6b66832b18aec98dab908328288cdf0e0a18f24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72188960402faa7956adf357824c6ea0a7015a7
5ed5191853ca95b12feb2cd28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf2588ddec2be0fa843630cdc268617c399a7a1615705cc4b088fc806031d557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0546d8270e2cbba06a241e421fa7ea130e53302a3f71f81829198d7ab8f3e6b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://123ea91d0a01f780d09196824c362eab68a4b8d943d414e80a3a0bbc2959e178\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://17751d3a9dd568ddb6c27efe08354f37aa4af7a91312e8c41e3e2e509d5be8ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T21:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T21:25:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.016745 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.016905 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.016937 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.016975 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.017001 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:03Z","lastTransitionTime":"2025-09-29T21:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.029697 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c6a0af8cc03379c6fa2a3969e5852a340c32bd8513dc7c963e074bb1694d436\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.044375 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50640abc-40db-4390-82d1-f3cfc76da71c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T21:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0479676aa753b6e60f9a57eb2a3055ddc5b10bce1547ac60e216c0865e79f24b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T21:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zmgnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T21:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w647f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T21:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.120213 4911 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.120298 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.120317 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.120344 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.120364 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:03Z","lastTransitionTime":"2025-09-29T21:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.223509 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.223569 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.223580 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.223606 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.223620 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:03Z","lastTransitionTime":"2025-09-29T21:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.327259 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.327329 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.327348 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.327379 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.327402 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:03Z","lastTransitionTime":"2025-09-29T21:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.430984 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.431063 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.431086 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.431117 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.431138 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:03Z","lastTransitionTime":"2025-09-29T21:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.535289 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.535394 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.535413 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.535444 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.535463 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:03Z","lastTransitionTime":"2025-09-29T21:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.639418 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.639509 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.639539 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.639577 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.639604 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:03Z","lastTransitionTime":"2025-09-29T21:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.701036 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:03 crc kubenswrapper[4911]: E0929 21:27:03.701212 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.702567 4911 scope.go:117] "RemoveContainer" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f" Sep 29 21:27:03 crc kubenswrapper[4911]: E0929 21:27:03.703014 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.743054 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.743126 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.743145 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.743181 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.743202 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:03Z","lastTransitionTime":"2025-09-29T21:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.845962 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.846046 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.846059 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.846077 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.846087 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:03Z","lastTransitionTime":"2025-09-29T21:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.949673 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.949722 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.949734 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.949750 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:03 crc kubenswrapper[4911]: I0929 21:27:03.949761 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:03Z","lastTransitionTime":"2025-09-29T21:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.053864 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.053940 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.053963 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.053991 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.054045 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:04Z","lastTransitionTime":"2025-09-29T21:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.157058 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.157649 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.157860 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.158057 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.158189 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:04Z","lastTransitionTime":"2025-09-29T21:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.262265 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.262350 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.262372 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.262399 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.262418 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:04Z","lastTransitionTime":"2025-09-29T21:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.366569 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.366654 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.366678 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.366714 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.366744 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:04Z","lastTransitionTime":"2025-09-29T21:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.470649 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.470722 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.470746 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.470777 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.470836 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:04Z","lastTransitionTime":"2025-09-29T21:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.573934 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.574116 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.574144 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.574227 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.574249 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:04Z","lastTransitionTime":"2025-09-29T21:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.677768 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.677932 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.677955 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.677986 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.678007 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:04Z","lastTransitionTime":"2025-09-29T21:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.700625 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:04 crc kubenswrapper[4911]: E0929 21:27:04.700841 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.701145 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.701169 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:04 crc kubenswrapper[4911]: E0929 21:27:04.701267 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:04 crc kubenswrapper[4911]: E0929 21:27:04.701449 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.781731 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.781881 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.781917 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.781953 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.781980 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:04Z","lastTransitionTime":"2025-09-29T21:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.885658 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.885759 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.885772 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.885865 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.885880 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:04Z","lastTransitionTime":"2025-09-29T21:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.989669 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.989772 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.989840 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.989880 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:04 crc kubenswrapper[4911]: I0929 21:27:04.989907 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:04Z","lastTransitionTime":"2025-09-29T21:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.093425 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.093488 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.093501 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.093521 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.093533 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:05Z","lastTransitionTime":"2025-09-29T21:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.197274 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.197329 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.197339 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.197359 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.197371 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:05Z","lastTransitionTime":"2025-09-29T21:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.300701 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.300754 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.300764 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.300785 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.300811 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:05Z","lastTransitionTime":"2025-09-29T21:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.403235 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.403314 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.403335 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.403365 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.403385 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:05Z","lastTransitionTime":"2025-09-29T21:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.507329 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.507430 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.507456 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.507492 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.507519 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:05Z","lastTransitionTime":"2025-09-29T21:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.610088 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.610136 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.610146 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.610163 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.610173 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:05Z","lastTransitionTime":"2025-09-29T21:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.701120 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:05 crc kubenswrapper[4911]: E0929 21:27:05.701384 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.714977 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.715073 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.715097 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.715136 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.715172 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:05Z","lastTransitionTime":"2025-09-29T21:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.819879 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.819930 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.819939 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.819959 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.819970 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:05Z","lastTransitionTime":"2025-09-29T21:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.924673 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.924747 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.924767 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.924823 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:05 crc kubenswrapper[4911]: I0929 21:27:05.924844 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:05Z","lastTransitionTime":"2025-09-29T21:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.028430 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.028494 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.028518 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.028542 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.028580 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:06Z","lastTransitionTime":"2025-09-29T21:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.132256 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.132338 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.132357 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.132386 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.132406 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:06Z","lastTransitionTime":"2025-09-29T21:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.235889 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.235935 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.235945 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.235965 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.235975 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:06Z","lastTransitionTime":"2025-09-29T21:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.339659 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.339720 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.339730 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.339747 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.339760 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:06Z","lastTransitionTime":"2025-09-29T21:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.442773 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.442851 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.442866 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.442884 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.442957 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:06Z","lastTransitionTime":"2025-09-29T21:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.546688 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.546769 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.546781 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.546818 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.546833 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:06Z","lastTransitionTime":"2025-09-29T21:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.650052 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.650152 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.650192 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.650231 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.650256 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:06Z","lastTransitionTime":"2025-09-29T21:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.700597 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.700666 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.700887 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:06 crc kubenswrapper[4911]: E0929 21:27:06.701038 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:27:06 crc kubenswrapper[4911]: E0929 21:27:06.701125 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:06 crc kubenswrapper[4911]: E0929 21:27:06.701237 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.753671 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.753739 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.753756 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.753785 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.753830 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:06Z","lastTransitionTime":"2025-09-29T21:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.858119 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.858203 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.858223 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.858261 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.858286 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:06Z","lastTransitionTime":"2025-09-29T21:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.961849 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.961919 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.961943 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.961972 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:06 crc kubenswrapper[4911]: I0929 21:27:06.961990 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:06Z","lastTransitionTime":"2025-09-29T21:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.066520 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.066597 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.066618 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.066643 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.066662 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:07Z","lastTransitionTime":"2025-09-29T21:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.169177 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.169210 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.169221 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.169239 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.169252 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:07Z","lastTransitionTime":"2025-09-29T21:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.273008 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.273095 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.273117 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.273147 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.273167 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:07Z","lastTransitionTime":"2025-09-29T21:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.376458 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.376597 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.376624 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.376654 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.376711 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:07Z","lastTransitionTime":"2025-09-29T21:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.479898 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.479942 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.479953 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.479973 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.479984 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:07Z","lastTransitionTime":"2025-09-29T21:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.583981 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.584062 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.584089 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.584119 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.584139 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:07Z","lastTransitionTime":"2025-09-29T21:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.687318 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.687379 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.687396 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.687427 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.687446 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:07Z","lastTransitionTime":"2025-09-29T21:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.701000 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:07 crc kubenswrapper[4911]: E0929 21:27:07.701202 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.790307 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.790371 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.790390 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.790412 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.790434 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:07Z","lastTransitionTime":"2025-09-29T21:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.893860 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.893973 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.893995 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.894061 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.894084 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:07Z","lastTransitionTime":"2025-09-29T21:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.997253 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.997320 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.997337 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.997366 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:07 crc kubenswrapper[4911]: I0929 21:27:07.997385 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:07Z","lastTransitionTime":"2025-09-29T21:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.100921 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.101008 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.101029 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.101061 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.101080 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:08Z","lastTransitionTime":"2025-09-29T21:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.205104 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.205193 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.205221 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.205249 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.205271 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:08Z","lastTransitionTime":"2025-09-29T21:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.316003 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.316072 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.316091 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.316117 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.316140 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:08Z","lastTransitionTime":"2025-09-29T21:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.419711 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.419778 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.419833 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.419917 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.419949 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:08Z","lastTransitionTime":"2025-09-29T21:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.524377 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.525106 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.525193 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.525235 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.525256 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:08Z","lastTransitionTime":"2025-09-29T21:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.629693 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.629758 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.629772 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.629813 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.629831 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:08Z","lastTransitionTime":"2025-09-29T21:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.700389 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.700459 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.700389 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:08 crc kubenswrapper[4911]: E0929 21:27:08.700648 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:27:08 crc kubenswrapper[4911]: E0929 21:27:08.700733 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:27:08 crc kubenswrapper[4911]: E0929 21:27:08.700916 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.732874 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.732954 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.732978 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.733006 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.733028 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:08Z","lastTransitionTime":"2025-09-29T21:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.836849 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.836948 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.836977 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.837013 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.837042 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:08Z","lastTransitionTime":"2025-09-29T21:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.945835 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.945925 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.945952 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.945983 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:08 crc kubenswrapper[4911]: I0929 21:27:08.946003 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:08Z","lastTransitionTime":"2025-09-29T21:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.049951 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.050009 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.050022 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.050042 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.050056 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:09Z","lastTransitionTime":"2025-09-29T21:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.153026 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.153091 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.153107 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.153128 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.153143 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:09Z","lastTransitionTime":"2025-09-29T21:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.256717 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.256777 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.256854 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.256893 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.256917 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:09Z","lastTransitionTime":"2025-09-29T21:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.360723 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.360862 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.360892 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.360932 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.360958 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:09Z","lastTransitionTime":"2025-09-29T21:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.464397 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.465017 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.465113 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.465207 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.465295 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:09Z","lastTransitionTime":"2025-09-29T21:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.568564 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.569765 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.570018 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.570221 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.570405 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:09Z","lastTransitionTime":"2025-09-29T21:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.674526 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.674627 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.674656 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.674689 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.674710 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:09Z","lastTransitionTime":"2025-09-29T21:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.700395 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:09 crc kubenswrapper[4911]: E0929 21:27:09.700556 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.778255 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.778656 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.778871 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.779016 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.779183 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:09Z","lastTransitionTime":"2025-09-29T21:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.882518 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.882903 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.882923 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.882940 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.882951 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:09Z","lastTransitionTime":"2025-09-29T21:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.986596 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.987024 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.987231 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.987382 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:09 crc kubenswrapper[4911]: I0929 21:27:09.987569 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:09Z","lastTransitionTime":"2025-09-29T21:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.090330 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.090428 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.090484 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.090508 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.090524 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:10Z","lastTransitionTime":"2025-09-29T21:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.194253 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.194339 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.194366 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.194401 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.194427 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:10Z","lastTransitionTime":"2025-09-29T21:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.297754 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.297840 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.297858 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.297883 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.297900 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:10Z","lastTransitionTime":"2025-09-29T21:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.400406 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.400445 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.400459 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.400475 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.400489 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:10Z","lastTransitionTime":"2025-09-29T21:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.503345 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.503432 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.503459 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.503490 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.503510 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:10Z","lastTransitionTime":"2025-09-29T21:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.611293 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.611425 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.611452 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.611483 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.611511 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:10Z","lastTransitionTime":"2025-09-29T21:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.701266 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.701326 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.701444 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:10 crc kubenswrapper[4911]: E0929 21:27:10.701532 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:27:10 crc kubenswrapper[4911]: E0929 21:27:10.701745 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:10 crc kubenswrapper[4911]: E0929 21:27:10.702467 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.714311 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.714357 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.714369 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.714384 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.714397 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:10Z","lastTransitionTime":"2025-09-29T21:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.784865 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.784925 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.784943 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.784972 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.784993 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T21:27:10Z","lastTransitionTime":"2025-09-29T21:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.878467 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx"] Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.879169 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.885697 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.886087 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.886557 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.888617 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Sep 29 21:27:10 crc kubenswrapper[4911]: I0929 21:27:10.929045 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podStartSLOduration=87.929008394 podStartE2EDuration="1m27.929008394s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:10.910229413 +0000 UTC m=+108.887342164" watchObservedRunningTime="2025-09-29 21:27:10.929008394 +0000 UTC m=+108.906121095" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.012695 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d41d9c2-4cce-4621-a85f-da71bab08232-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.012767 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5d41d9c2-4cce-4621-a85f-da71bab08232-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.012870 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5d41d9c2-4cce-4621-a85f-da71bab08232-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.012943 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5d41d9c2-4cce-4621-a85f-da71bab08232-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.012991 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5d41d9c2-4cce-4621-a85f-da71bab08232-service-ca\") pod 
\"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.063106 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-lrfbg" podStartSLOduration=88.063073719 podStartE2EDuration="1m28.063073719s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:11.019260297 +0000 UTC m=+108.996372998" watchObservedRunningTime="2025-09-29 21:27:11.063073719 +0000 UTC m=+109.040186430" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.107107 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=90.107037365 podStartE2EDuration="1m30.107037365s" podCreationTimestamp="2025-09-29 21:25:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:11.106276922 +0000 UTC m=+109.083389653" watchObservedRunningTime="2025-09-29 21:27:11.107037365 +0000 UTC m=+109.084150116" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.114319 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d41d9c2-4cce-4621-a85f-da71bab08232-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.114401 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5d41d9c2-4cce-4621-a85f-da71bab08232-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.114467 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5d41d9c2-4cce-4621-a85f-da71bab08232-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.114521 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5d41d9c2-4cce-4621-a85f-da71bab08232-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.114559 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5d41d9c2-4cce-4621-a85f-da71bab08232-service-ca\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.114697 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5d41d9c2-4cce-4621-a85f-da71bab08232-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.114813 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5d41d9c2-4cce-4621-a85f-da71bab08232-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.116030 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5d41d9c2-4cce-4621-a85f-da71bab08232-service-ca\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.123980 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d41d9c2-4cce-4621-a85f-da71bab08232-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.141497 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5d41d9c2-4cce-4621-a85f-da71bab08232-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-mqkfx\" (UID: \"5d41d9c2-4cce-4621-a85f-da71bab08232\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.165247 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=90.165202864 podStartE2EDuration="1m30.165202864s" podCreationTimestamp="2025-09-29 21:25:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:11.139611886 +0000 UTC m=+109.116724657" watchObservedRunningTime="2025-09-29 21:27:11.165202864 +0000 UTC m=+109.142315585" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.205395 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.222949 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cfbgk" podStartSLOduration=88.222913058 podStartE2EDuration="1m28.222913058s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:11.214891974 +0000 UTC m=+109.192004685" watchObservedRunningTime="2025-09-29 21:27:11.222913058 +0000 UTC m=+109.200025779" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.229777 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=29.229745676 podStartE2EDuration="29.229745676s" podCreationTimestamp="2025-09-29 21:26:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:11.229121066 +0000 UTC m=+109.206233777" watchObservedRunningTime="2025-09-29 21:27:11.229745676 +0000 UTC m=+109.206858357" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.251697 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=55.251674463 podStartE2EDuration="55.251674463s" podCreationTimestamp="2025-09-29 21:26:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:11.251484277 +0000 UTC m=+109.228596948" watchObservedRunningTime="2025-09-29 21:27:11.251674463 +0000 UTC m=+109.228787134" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.268399 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-dnhjh" podStartSLOduration=90.268364589 podStartE2EDuration="1m30.268364589s" podCreationTimestamp="2025-09-29 21:25:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:11.267415161 +0000 UTC m=+109.244527832" watchObservedRunningTime="2025-09-29 21:27:11.268364589 +0000 UTC m=+109.245477280" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.290250 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-bp485" podStartSLOduration=88.290218634 podStartE2EDuration="1m28.290218634s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:11.289618236 +0000 UTC m=+109.266730917" watchObservedRunningTime="2025-09-29 21:27:11.290218634 +0000 UTC m=+109.267331315" Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.333587 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-dz6zq" podStartSLOduration=89.333559511 podStartE2EDuration="1m29.333559511s" podCreationTimestamp="2025-09-29 21:25:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:11.30193763 +0000 UTC m=+109.279050301" watchObservedRunningTime="2025-09-29 
Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.333697 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=86.333693346 podStartE2EDuration="1m26.333693346s" podCreationTimestamp="2025-09-29 21:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:11.333669835 +0000 UTC m=+109.310782536" watchObservedRunningTime="2025-09-29 21:27:11.333693346 +0000 UTC m=+109.310806017"
Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.393694 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" event={"ID":"5d41d9c2-4cce-4621-a85f-da71bab08232","Type":"ContainerStarted","Data":"09ee062e1ffc0aafb0e126e7c550c9250f4a0bf051d54fb3ca333a1c699933ce"}
Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.393766 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" event={"ID":"5d41d9c2-4cce-4621-a85f-da71bab08232","Type":"ContainerStarted","Data":"c2975452d50a732be7896fe9219d211e8f171372c6e1e0631f2a3cf0a8fcf8f2"}
Sep 29 21:27:11 crc kubenswrapper[4911]: I0929 21:27:11.700773 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:27:11 crc kubenswrapper[4911]: E0929 21:27:11.701095 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 21:27:12 crc kubenswrapper[4911]: I0929 21:27:12.700522 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:27:12 crc kubenswrapper[4911]: I0929 21:27:12.702903 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:27:12 crc kubenswrapper[4911]: E0929 21:27:12.702880 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd"
Sep 29 21:27:12 crc kubenswrapper[4911]: I0929 21:27:12.702946 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:27:12 crc kubenswrapper[4911]: E0929 21:27:12.703541 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 21:27:12 crc kubenswrapper[4911]: E0929 21:27:12.703457 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 21:27:13 crc kubenswrapper[4911]: I0929 21:27:13.700045 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:27:13 crc kubenswrapper[4911]: E0929 21:27:13.700291 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 21:27:14 crc kubenswrapper[4911]: I0929 21:27:14.701112 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:27:14 crc kubenswrapper[4911]: I0929 21:27:14.701253 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:27:14 crc kubenswrapper[4911]: E0929 21:27:14.701288 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 21:27:14 crc kubenswrapper[4911]: E0929 21:27:14.701575 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd"
Sep 29 21:27:14 crc kubenswrapper[4911]: I0929 21:27:14.701761 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:27:14 crc kubenswrapper[4911]: E0929 21:27:14.701978 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 21:27:15 crc kubenswrapper[4911]: I0929 21:27:15.700210 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:27:15 crc kubenswrapper[4911]: E0929 21:27:15.700443 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 21:27:15 crc kubenswrapper[4911]: I0929 21:27:15.701538 4911 scope.go:117] "RemoveContainer" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f"
Sep 29 21:27:15 crc kubenswrapper[4911]: E0929 21:27:15.701865 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9wxd8_openshift-ovn-kubernetes(4e3aa70f-b0da-44c9-a850-96d4494b02fc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc"
Sep 29 21:27:16 crc kubenswrapper[4911]: I0929 21:27:16.701086 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 21:27:16 crc kubenswrapper[4911]: I0929 21:27:16.701211 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 21:27:16 crc kubenswrapper[4911]: E0929 21:27:16.701314 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 21:27:16 crc kubenswrapper[4911]: E0929 21:27:16.701469 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 21:27:16 crc kubenswrapper[4911]: I0929 21:27:16.702625 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:27:16 crc kubenswrapper[4911]: E0929 21:27:16.702847 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd"
Sep 29 21:27:17 crc kubenswrapper[4911]: I0929 21:27:17.700724 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:27:17 crc kubenswrapper[4911]: E0929 21:27:17.700936 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 21:27:18 crc kubenswrapper[4911]: I0929 21:27:18.422708 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lrfbg_1179c900-e866-4c5a-bb06-6032cc03a075/kube-multus/1.log"
Sep 29 21:27:18 crc kubenswrapper[4911]: I0929 21:27:18.424346 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lrfbg_1179c900-e866-4c5a-bb06-6032cc03a075/kube-multus/0.log"
Sep 29 21:27:18 crc kubenswrapper[4911]: I0929 21:27:18.424421 4911 generic.go:334] "Generic (PLEG): container finished" podID="1179c900-e866-4c5a-bb06-6032cc03a075" containerID="bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd" exitCode=1
Sep 29 21:27:18 crc kubenswrapper[4911]: I0929 21:27:18.424467 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lrfbg" event={"ID":"1179c900-e866-4c5a-bb06-6032cc03a075","Type":"ContainerDied","Data":"bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd"}
Sep 29 21:27:18 crc kubenswrapper[4911]: I0929 21:27:18.424516 4911 scope.go:117] "RemoveContainer" containerID="8bb16b2f889ccaf66b96ffea2f6585967c14530a95a95000b652ce817e97c37e"
Sep 29 21:27:18 crc kubenswrapper[4911]: I0929 21:27:18.425327 4911 scope.go:117] "RemoveContainer" containerID="bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd"
Sep 29 21:27:18 crc kubenswrapper[4911]: E0929 21:27:18.426049 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-lrfbg_openshift-multus(1179c900-e866-4c5a-bb06-6032cc03a075)\"" pod="openshift-multus/multus-lrfbg" podUID="1179c900-e866-4c5a-bb06-6032cc03a075"
Sep 29 21:27:18 crc kubenswrapper[4911]: I0929 21:27:18.462868 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mqkfx" podStartSLOduration=95.462786641 podStartE2EDuration="1m35.462786641s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:11.40718418 +0000 UTC m=+109.384296881" watchObservedRunningTime="2025-09-29 21:27:18.462786641 +0000 UTC m=+116.439899352"
Sep 29 21:27:18 crc kubenswrapper[4911]: I0929 21:27:18.700867 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh"
Sep 29 21:27:18 crc kubenswrapper[4911]: E0929 21:27:18.701408 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd"
pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:18 crc kubenswrapper[4911]: I0929 21:27:18.701459 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:18 crc kubenswrapper[4911]: I0929 21:27:18.701755 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:18 crc kubenswrapper[4911]: E0929 21:27:18.702001 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:27:18 crc kubenswrapper[4911]: E0929 21:27:18.702193 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:27:19 crc kubenswrapper[4911]: I0929 21:27:19.430878 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lrfbg_1179c900-e866-4c5a-bb06-6032cc03a075/kube-multus/1.log" Sep 29 21:27:19 crc kubenswrapper[4911]: I0929 21:27:19.700756 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:19 crc kubenswrapper[4911]: E0929 21:27:19.700964 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:27:20 crc kubenswrapper[4911]: I0929 21:27:20.700469 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:20 crc kubenswrapper[4911]: I0929 21:27:20.700555 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:20 crc kubenswrapper[4911]: E0929 21:27:20.700655 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:27:20 crc kubenswrapper[4911]: E0929 21:27:20.700775 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:27:20 crc kubenswrapper[4911]: I0929 21:27:20.700904 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:20 crc kubenswrapper[4911]: E0929 21:27:20.701234 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:21 crc kubenswrapper[4911]: I0929 21:27:21.701232 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:21 crc kubenswrapper[4911]: E0929 21:27:21.701885 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:27:22 crc kubenswrapper[4911]: E0929 21:27:22.668467 4911 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 29 21:27:22 crc kubenswrapper[4911]: I0929 21:27:22.700077 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:22 crc kubenswrapper[4911]: I0929 21:27:22.700117 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:22 crc kubenswrapper[4911]: E0929 21:27:22.702419 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:27:22 crc kubenswrapper[4911]: I0929 21:27:22.702513 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:22 crc kubenswrapper[4911]: E0929 21:27:22.702604 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:27:22 crc kubenswrapper[4911]: E0929 21:27:22.702716 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:22 crc kubenswrapper[4911]: E0929 21:27:22.802356 4911 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 21:27:23 crc kubenswrapper[4911]: I0929 21:27:23.700173 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:23 crc kubenswrapper[4911]: E0929 21:27:23.700418 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:27:24 crc kubenswrapper[4911]: I0929 21:27:24.700520 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:24 crc kubenswrapper[4911]: I0929 21:27:24.700601 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:24 crc kubenswrapper[4911]: E0929 21:27:24.700728 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:27:24 crc kubenswrapper[4911]: I0929 21:27:24.700601 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:24 crc kubenswrapper[4911]: E0929 21:27:24.700935 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:27:24 crc kubenswrapper[4911]: E0929 21:27:24.701019 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:25 crc kubenswrapper[4911]: I0929 21:27:25.700344 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:25 crc kubenswrapper[4911]: E0929 21:27:25.700556 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:27:26 crc kubenswrapper[4911]: I0929 21:27:26.700773 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:26 crc kubenswrapper[4911]: I0929 21:27:26.700824 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:26 crc kubenswrapper[4911]: E0929 21:27:26.701084 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:27:26 crc kubenswrapper[4911]: I0929 21:27:26.700856 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:26 crc kubenswrapper[4911]: E0929 21:27:26.701193 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:27:26 crc kubenswrapper[4911]: E0929 21:27:26.701330 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:27 crc kubenswrapper[4911]: I0929 21:27:27.700532 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:27 crc kubenswrapper[4911]: E0929 21:27:27.701539 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:27:27 crc kubenswrapper[4911]: I0929 21:27:27.703114 4911 scope.go:117] "RemoveContainer" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f" Sep 29 21:27:27 crc kubenswrapper[4911]: E0929 21:27:27.804787 4911 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 21:27:28 crc kubenswrapper[4911]: I0929 21:27:28.475558 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/3.log" Sep 29 21:27:28 crc kubenswrapper[4911]: I0929 21:27:28.480994 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerStarted","Data":"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d"} Sep 29 21:27:28 crc kubenswrapper[4911]: I0929 21:27:28.481668 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:27:28 crc kubenswrapper[4911]: I0929 21:27:28.517267 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podStartSLOduration=105.517235777 podStartE2EDuration="1m45.517235777s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:28.516506176 +0000 UTC m=+126.493618897" watchObservedRunningTime="2025-09-29 21:27:28.517235777 +0000 UTC m=+126.494348488" Sep 29 21:27:28 crc kubenswrapper[4911]: I0929 21:27:28.700408 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:28 crc kubenswrapper[4911]: I0929 21:27:28.700520 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:28 crc kubenswrapper[4911]: I0929 21:27:28.700520 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:28 crc kubenswrapper[4911]: E0929 21:27:28.700695 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:27:28 crc kubenswrapper[4911]: E0929 21:27:28.700944 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:27:28 crc kubenswrapper[4911]: E0929 21:27:28.701129 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:28 crc kubenswrapper[4911]: I0929 21:27:28.723566 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-d5gdh"] Sep 29 21:27:29 crc kubenswrapper[4911]: I0929 21:27:29.484359 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:29 crc kubenswrapper[4911]: E0929 21:27:29.484488 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:29 crc kubenswrapper[4911]: I0929 21:27:29.700737 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:29 crc kubenswrapper[4911]: E0929 21:27:29.701003 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:27:30 crc kubenswrapper[4911]: I0929 21:27:30.700980 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:30 crc kubenswrapper[4911]: I0929 21:27:30.701127 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:30 crc kubenswrapper[4911]: I0929 21:27:30.701327 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:30 crc kubenswrapper[4911]: E0929 21:27:30.701460 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:27:30 crc kubenswrapper[4911]: E0929 21:27:30.701568 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:30 crc kubenswrapper[4911]: E0929 21:27:30.701700 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:27:30 crc kubenswrapper[4911]: I0929 21:27:30.701715 4911 scope.go:117] "RemoveContainer" containerID="bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd" Sep 29 21:27:31 crc kubenswrapper[4911]: I0929 21:27:31.499275 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lrfbg_1179c900-e866-4c5a-bb06-6032cc03a075/kube-multus/1.log" Sep 29 21:27:31 crc kubenswrapper[4911]: I0929 21:27:31.500113 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lrfbg" event={"ID":"1179c900-e866-4c5a-bb06-6032cc03a075","Type":"ContainerStarted","Data":"894308c4aead3d39450f470850392668331de7f234bd6595c49ea96fa39181d8"} Sep 29 21:27:31 crc kubenswrapper[4911]: I0929 21:27:31.699999 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:31 crc kubenswrapper[4911]: E0929 21:27:31.700157 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 21:27:32 crc kubenswrapper[4911]: I0929 21:27:32.701015 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:32 crc kubenswrapper[4911]: I0929 21:27:32.701147 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:32 crc kubenswrapper[4911]: E0929 21:27:32.703317 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 21:27:32 crc kubenswrapper[4911]: I0929 21:27:32.703358 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:32 crc kubenswrapper[4911]: E0929 21:27:32.703501 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 21:27:32 crc kubenswrapper[4911]: E0929 21:27:32.704346 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-d5gdh" podUID="b53f9593-39bf-43e0-b1de-09192d0167cd" Sep 29 21:27:33 crc kubenswrapper[4911]: I0929 21:27:33.700766 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:33 crc kubenswrapper[4911]: I0929 21:27:33.703664 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Sep 29 21:27:33 crc kubenswrapper[4911]: I0929 21:27:33.704544 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Sep 29 21:27:34 crc kubenswrapper[4911]: I0929 21:27:34.700998 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:34 crc kubenswrapper[4911]: I0929 21:27:34.701056 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:34 crc kubenswrapper[4911]: I0929 21:27:34.701097 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:27:34 crc kubenswrapper[4911]: I0929 21:27:34.706113 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Sep 29 21:27:34 crc kubenswrapper[4911]: I0929 21:27:34.706153 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Sep 29 21:27:34 crc kubenswrapper[4911]: I0929 21:27:34.706113 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Sep 29 21:27:34 crc kubenswrapper[4911]: I0929 21:27:34.706516 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.330853 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.375179 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2flr8"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.376046 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.377291 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rtrxr"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.381424 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.381996 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wb4m9"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.387476 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.398686 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.399144 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.399602 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.399776 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.400027 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.400166 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.400289 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.400429 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.400570 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.400696 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.400857 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.401020 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.401152 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.401309 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.401485 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.401653 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.405371 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.406066 4911 
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.406640 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.406876 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.406872 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.406929 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.407417 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.418302 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5"]
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.418846 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-f7vrh"]
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.419047 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.419133 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf"]
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.419465 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.419529 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj"]
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.419780 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.419834 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-f7vrh"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.419852 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-szrp2"]
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.419891 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.420163 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-dl4br"]
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.420556 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.421702 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-szrp2"
Need to start a new one" pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.429091 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.429631 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.430358 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-dl4br" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.434059 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h9qcg"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.434597 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.435020 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.435431 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.435471 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.435583 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.435593 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.435859 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.435937 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436028 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436097 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436120 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436220 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436249 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436535 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436591 4911 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"openshift-service-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436656 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436748 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436865 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436955 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436984 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.437109 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.436617 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.437267 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.435888 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.437410 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.437504 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.437560 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.437587 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.437644 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.437669 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.437967 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.438177 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.438211 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.471228 4911 
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.471558 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.471745 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.472908 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.473835 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg"]
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.475433 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.475759 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rd6cq"]
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.480698 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.482620 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.482979 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.514079 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.514425 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.516080 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.516652 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.517182 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.517602 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.518269 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.518724 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.519887 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.520047 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.521185 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.521324 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.521417 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.522140 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.522764 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.523025 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.523153 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-mrbmd"]
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.523235 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.523266 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.523828 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-mrbmd"
Need to start a new one" pod="openshift-console/downloads-7954f5f757-mrbmd" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524028 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524309 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e66f4856-1a57-441f-9701-4f61008259c6-audit-dir\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524347 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmpqp\" (UniqueName: \"kubernetes.io/projected/d55a1ae9-2e28-49f6-904b-67a246fda7e6-kube-api-access-lmpqp\") pod \"machine-api-operator-5694c8668f-2flr8\" (UID: \"d55a1ae9-2e28-49f6-904b-67a246fda7e6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524373 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524371 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e66f4856-1a57-441f-9701-4f61008259c6-audit-policies\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524460 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524493 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d55a1ae9-2e28-49f6-904b-67a246fda7e6-images\") pod \"machine-api-operator-5694c8668f-2flr8\" (UID: \"d55a1ae9-2e28-49f6-904b-67a246fda7e6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524515 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524519 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-service-ca\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524546 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-audit\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524565 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524588 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-console-config\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524605 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-node-pullsecrets\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524624 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-etcd-serving-ca\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524641 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-trusted-ca-bundle\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524659 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-serving-cert\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524680 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-encryption-config\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524701 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e66f4856-1a57-441f-9701-4f61008259c6-etcd-client\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524722 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlfjd\" (UniqueName: \"kubernetes.io/projected/e66f4856-1a57-441f-9701-4f61008259c6-kube-api-access-vlfjd\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524761 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-oauth-serving-cert\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524777 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-config\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524843 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e66f4856-1a57-441f-9701-4f61008259c6-encryption-config\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524875 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-audit-dir\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524900 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c75rb\" (UniqueName: \"kubernetes.io/projected/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-kube-api-access-c75rb\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524925 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d55a1ae9-2e28-49f6-904b-67a246fda7e6-config\") pod \"machine-api-operator-5694c8668f-2flr8\" (UID: \"d55a1ae9-2e28-49f6-904b-67a246fda7e6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524942 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-image-import-ca\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524965 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e66f4856-1a57-441f-9701-4f61008259c6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5"
\"kubernetes.io/configmap/e66f4856-1a57-441f-9701-4f61008259c6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.524982 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e66f4856-1a57-441f-9701-4f61008259c6-serving-cert\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.525010 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/d55a1ae9-2e28-49f6-904b-67a246fda7e6-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2flr8\" (UID: \"d55a1ae9-2e28-49f6-904b-67a246fda7e6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.525044 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/96f91b1a-e276-4bc1-9308-5375745c803c-console-serving-cert\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.525061 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e66f4856-1a57-441f-9701-4f61008259c6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.525078 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n66j4\" (UniqueName: \"kubernetes.io/projected/96f91b1a-e276-4bc1-9308-5375745c803c-kube-api-access-n66j4\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.525096 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-etcd-client\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.525373 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.525394 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.529621 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.530201 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.530474 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.530873 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.531409 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.531531 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s8dhg"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.534960 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.535069 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.535441 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.535837 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-nwjdw"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.536496 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.536731 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.536751 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.536934 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.537009 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.537100 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.537227 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.537452 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-nwjdw" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.537721 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.537757 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.539258 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.539602 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.540128 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.540332 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.544331 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.544591 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.544814 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.544933 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-rlm59"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.545759 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rlm59" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.553064 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.553726 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.554180 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.554513 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.555542 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.557547 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.558099 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.559906 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.561842 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.561964 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.565351 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.567061 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.571875 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dxnj8"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.572931 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.574059 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2flr8"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.575126 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.575965 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.576236 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.576932 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.577434 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.578295 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.578553 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.579006 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.579698 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.581410 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.582640 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.583400 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.583409 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.583910 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-gx2fb"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.584417 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.584832 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.586198 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.586381 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wslbb"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.587287 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.587498 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-j9nwx"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.588092 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.588480 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rtrxr"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.589479 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.590463 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.591320 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.591511 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8djg4"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.593975 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-f7vrh"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.595607 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wb4m9"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.596358 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.597639 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-grwjm"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.599710 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.609071 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-dl4br"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.609136 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-mrbmd"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.612186 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h9qcg"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.612673 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-grwjm" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.614842 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.614881 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.616287 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.617735 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s8dhg"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.619267 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.621005 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.622780 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.624651 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-nwjdw"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626214 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-audit-dir\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626263 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9t2f8\" (UniqueName: \"kubernetes.io/projected/cd6c061e-79d1-4353-b077-69cb656a8823-kube-api-access-9t2f8\") pod \"openshift-config-operator-7777fb866f-c5pgg\" (UID: \"cd6c061e-79d1-4353-b077-69cb656a8823\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626288 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c75rb\" (UniqueName: \"kubernetes.io/projected/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-kube-api-access-c75rb\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626309 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9a381472-08f8-4263-b008-573df71b1605-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2fdjj\" (UID: \"9a381472-08f8-4263-b008-573df71b1605\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626329 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d55a1ae9-2e28-49f6-904b-67a246fda7e6-config\") pod \"machine-api-operator-5694c8668f-2flr8\" (UID: \"d55a1ae9-2e28-49f6-904b-67a246fda7e6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626349 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-image-import-ca\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626353 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-audit-dir\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626368 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e66f4856-1a57-441f-9701-4f61008259c6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626419 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e66f4856-1a57-441f-9701-4f61008259c6-serving-cert\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626451 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-config\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626477 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/affba9da-62d0-47e6-b833-8b6c0e774fde-serving-cert\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626523 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/d55a1ae9-2e28-49f6-904b-67a246fda7e6-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2flr8\" (UID: \"d55a1ae9-2e28-49f6-904b-67a246fda7e6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626548 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/96f91b1a-e276-4bc1-9308-5375745c803c-console-serving-cert\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " 
pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626570 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e66f4856-1a57-441f-9701-4f61008259c6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626619 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n66j4\" (UniqueName: \"kubernetes.io/projected/96f91b1a-e276-4bc1-9308-5375745c803c-kube-api-access-n66j4\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626672 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-etcd-client\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626732 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-client-ca\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626776 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e66f4856-1a57-441f-9701-4f61008259c6-audit-dir\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626807 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626834 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djxk9\" (UniqueName: \"kubernetes.io/projected/affba9da-62d0-47e6-b833-8b6c0e774fde-kube-api-access-djxk9\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626863 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmpqp\" (UniqueName: \"kubernetes.io/projected/d55a1ae9-2e28-49f6-904b-67a246fda7e6-kube-api-access-lmpqp\") pod \"machine-api-operator-5694c8668f-2flr8\" (UID: \"d55a1ae9-2e28-49f6-904b-67a246fda7e6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626883 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" 
(UniqueName: \"kubernetes.io/configmap/e66f4856-1a57-441f-9701-4f61008259c6-audit-policies\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626902 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a381472-08f8-4263-b008-573df71b1605-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2fdjj\" (UID: \"9a381472-08f8-4263-b008-573df71b1605\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626937 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d55a1ae9-2e28-49f6-904b-67a246fda7e6-images\") pod \"machine-api-operator-5694c8668f-2flr8\" (UID: \"d55a1ae9-2e28-49f6-904b-67a246fda7e6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626957 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-service-ca\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.626978 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-audit\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627000 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cd6c061e-79d1-4353-b077-69cb656a8823-available-featuregates\") pod \"openshift-config-operator-7777fb866f-c5pgg\" (UID: \"cd6c061e-79d1-4353-b077-69cb656a8823\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627026 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627044 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-console-config\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627060 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-node-pullsecrets\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc 
kubenswrapper[4911]: I0929 21:27:41.627075 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-etcd-serving-ca\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627094 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1d74425-0991-4922-9b99-95bb5e1c596c-config\") pod \"console-operator-58897d9998-f7vrh\" (UID: \"e1d74425-0991-4922-9b99-95bb5e1c596c\") " pod="openshift-console-operator/console-operator-58897d9998-f7vrh" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627115 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlfjd\" (UniqueName: \"kubernetes.io/projected/e66f4856-1a57-441f-9701-4f61008259c6-kube-api-access-vlfjd\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627132 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a381472-08f8-4263-b008-573df71b1605-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2fdjj\" (UID: \"9a381472-08f8-4263-b008-573df71b1605\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627149 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-trusted-ca-bundle\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627166 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-serving-cert\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627181 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-encryption-config\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627217 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e66f4856-1a57-441f-9701-4f61008259c6-etcd-client\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627239 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/96f91b1a-e276-4bc1-9308-5375745c803c-console-oauth-config\") pod \"console-f9d7485db-szrp2\" (UID: 
\"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627256 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-oauth-serving-cert\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627272 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-config\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627289 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd6c061e-79d1-4353-b077-69cb656a8823-serving-cert\") pod \"openshift-config-operator-7777fb866f-c5pgg\" (UID: \"cd6c061e-79d1-4353-b077-69cb656a8823\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627311 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwc7t\" (UniqueName: \"kubernetes.io/projected/e1d74425-0991-4922-9b99-95bb5e1c596c-kube-api-access-rwc7t\") pod \"console-operator-58897d9998-f7vrh\" (UID: \"e1d74425-0991-4922-9b99-95bb5e1c596c\") " pod="openshift-console-operator/console-operator-58897d9998-f7vrh" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627331 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e66f4856-1a57-441f-9701-4f61008259c6-encryption-config\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627349 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1d74425-0991-4922-9b99-95bb5e1c596c-serving-cert\") pod \"console-operator-58897d9998-f7vrh\" (UID: \"e1d74425-0991-4922-9b99-95bb5e1c596c\") " pod="openshift-console-operator/console-operator-58897d9998-f7vrh" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627365 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e1d74425-0991-4922-9b99-95bb5e1c596c-trusted-ca\") pod \"console-operator-58897d9998-f7vrh\" (UID: \"e1d74425-0991-4922-9b99-95bb5e1c596c\") " pod="openshift-console-operator/console-operator-58897d9998-f7vrh" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627437 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e66f4856-1a57-441f-9701-4f61008259c6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627511 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-node-pullsecrets\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627535 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-image-import-ca\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.627809 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e66f4856-1a57-441f-9701-4f61008259c6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.628063 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d55a1ae9-2e28-49f6-904b-67a246fda7e6-config\") pod \"machine-api-operator-5694c8668f-2flr8\" (UID: \"d55a1ae9-2e28-49f6-904b-67a246fda7e6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.628211 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e66f4856-1a57-441f-9701-4f61008259c6-audit-policies\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.628434 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d55a1ae9-2e28-49f6-904b-67a246fda7e6-images\") pod \"machine-api-operator-5694c8668f-2flr8\" (UID: \"d55a1ae9-2e28-49f6-904b-67a246fda7e6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.628603 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-service-ca\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.629034 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-trusted-ca-bundle\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.629326 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.629709 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-config\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.629783 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-audit\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.629812 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dxnj8"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.629817 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-oauth-serving-cert\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.630180 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e66f4856-1a57-441f-9701-4f61008259c6-audit-dir\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.630363 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-console-config\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.630606 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.630919 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-etcd-serving-ca\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.632526 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rd6cq"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.634248 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e66f4856-1a57-441f-9701-4f61008259c6-encryption-config\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.634415 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/96f91b1a-e276-4bc1-9308-5375745c803c-console-oauth-config\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.634469 4911 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-console/console-f9d7485db-szrp2"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.634549 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e66f4856-1a57-441f-9701-4f61008259c6-serving-cert\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.634570 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-encryption-config\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.634744 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/96f91b1a-e276-4bc1-9308-5375745c803c-console-serving-cert\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.634885 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e66f4856-1a57-441f-9701-4f61008259c6-etcd-client\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.635191 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-etcd-client\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.635359 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/d55a1ae9-2e28-49f6-904b-67a246fda7e6-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2flr8\" (UID: \"d55a1ae9-2e28-49f6-904b-67a246fda7e6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.635998 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.642114 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.642146 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.642159 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.645887 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.645949 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.637370 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-serving-cert\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.647919 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.648055 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.649115 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wslbb"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.651817 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-cqtn6"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.656213 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-2pbvr"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.657216 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-cqtn6" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.659860 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-2pbvr" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.660056 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.660712 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-rlm59"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.662375 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.664634 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.666901 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-grwjm"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.668421 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.669864 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.672268 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.673556 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-j9nwx"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.675052 4911 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-cqtn6"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.676570 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-2pbvr"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.678080 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.679326 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-q2dkc"] Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.680357 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-q2dkc" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.699256 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.719026 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.728915 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1d74425-0991-4922-9b99-95bb5e1c596c-config\") pod \"console-operator-58897d9998-f7vrh\" (UID: \"e1d74425-0991-4922-9b99-95bb5e1c596c\") " pod="openshift-console-operator/console-operator-58897d9998-f7vrh" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.729081 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a381472-08f8-4263-b008-573df71b1605-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2fdjj\" (UID: \"9a381472-08f8-4263-b008-573df71b1605\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.729202 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd6c061e-79d1-4353-b077-69cb656a8823-serving-cert\") pod \"openshift-config-operator-7777fb866f-c5pgg\" (UID: \"cd6c061e-79d1-4353-b077-69cb656a8823\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.729319 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwc7t\" (UniqueName: \"kubernetes.io/projected/e1d74425-0991-4922-9b99-95bb5e1c596c-kube-api-access-rwc7t\") pod \"console-operator-58897d9998-f7vrh\" (UID: \"e1d74425-0991-4922-9b99-95bb5e1c596c\") " pod="openshift-console-operator/console-operator-58897d9998-f7vrh" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.729432 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1d74425-0991-4922-9b99-95bb5e1c596c-serving-cert\") pod \"console-operator-58897d9998-f7vrh\" (UID: \"e1d74425-0991-4922-9b99-95bb5e1c596c\") " pod="openshift-console-operator/console-operator-58897d9998-f7vrh" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.729532 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/e1d74425-0991-4922-9b99-95bb5e1c596c-trusted-ca\") pod \"console-operator-58897d9998-f7vrh\" (UID: \"e1d74425-0991-4922-9b99-95bb5e1c596c\") " pod="openshift-console-operator/console-operator-58897d9998-f7vrh" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.729660 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9t2f8\" (UniqueName: \"kubernetes.io/projected/cd6c061e-79d1-4353-b077-69cb656a8823-kube-api-access-9t2f8\") pod \"openshift-config-operator-7777fb866f-c5pgg\" (UID: \"cd6c061e-79d1-4353-b077-69cb656a8823\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.729772 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9a381472-08f8-4263-b008-573df71b1605-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2fdjj\" (UID: \"9a381472-08f8-4263-b008-573df71b1605\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.729891 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-config\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.729985 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/affba9da-62d0-47e6-b833-8b6c0e774fde-serving-cert\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.730143 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-client-ca\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.730247 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.730357 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djxk9\" (UniqueName: \"kubernetes.io/projected/affba9da-62d0-47e6-b833-8b6c0e774fde-kube-api-access-djxk9\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.730460 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a381472-08f8-4263-b008-573df71b1605-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2fdjj\" (UID: 
\"9a381472-08f8-4263-b008-573df71b1605\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.730581 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cd6c061e-79d1-4353-b077-69cb656a8823-available-featuregates\") pod \"openshift-config-operator-7777fb866f-c5pgg\" (UID: \"cd6c061e-79d1-4353-b077-69cb656a8823\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.731022 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a381472-08f8-4263-b008-573df71b1605-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2fdjj\" (UID: \"9a381472-08f8-4263-b008-573df71b1605\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.730258 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1d74425-0991-4922-9b99-95bb5e1c596c-config\") pod \"console-operator-58897d9998-f7vrh\" (UID: \"e1d74425-0991-4922-9b99-95bb5e1c596c\") " pod="openshift-console-operator/console-operator-58897d9998-f7vrh" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.731511 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cd6c061e-79d1-4353-b077-69cb656a8823-available-featuregates\") pod \"openshift-config-operator-7777fb866f-c5pgg\" (UID: \"cd6c061e-79d1-4353-b077-69cb656a8823\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.731553 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e1d74425-0991-4922-9b99-95bb5e1c596c-trusted-ca\") pod \"console-operator-58897d9998-f7vrh\" (UID: \"e1d74425-0991-4922-9b99-95bb5e1c596c\") " pod="openshift-console-operator/console-operator-58897d9998-f7vrh" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.732000 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-config\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.732190 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-client-ca\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.733301 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.734926 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a381472-08f8-4263-b008-573df71b1605-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2fdjj\" (UID: \"9a381472-08f8-4263-b008-573df71b1605\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.735213 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1d74425-0991-4922-9b99-95bb5e1c596c-serving-cert\") pod \"console-operator-58897d9998-f7vrh\" (UID: \"e1d74425-0991-4922-9b99-95bb5e1c596c\") " pod="openshift-console-operator/console-operator-58897d9998-f7vrh"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.738653 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.741422 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/affba9da-62d0-47e6-b833-8b6c0e774fde-serving-cert\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.741803 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd6c061e-79d1-4353-b077-69cb656a8823-serving-cert\") pod \"openshift-config-operator-7777fb866f-c5pgg\" (UID: \"cd6c061e-79d1-4353-b077-69cb656a8823\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.760676 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.785082 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.805221 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831188 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831227 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbjl5\" (UniqueName: \"kubernetes.io/projected/9a3253d3-c916-477b-82cd-7f7911bfc1b0-kube-api-access-sbjl5\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831253 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-bound-sa-token\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831311 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-registry-tls\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831401 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4357f10f-dad3-4233-9d03-1cad6319e4a9-registry-certificates\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831460 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831493 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlb4z\" (UniqueName: \"kubernetes.io/projected/a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8-kube-api-access-nlb4z\") pod \"cluster-samples-operator-665b6dd947-ms4bf\" (UID: \"a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831523 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/79387310-5596-4b5f-af33-0f6b8a9a40ff-metrics-tls\") pod \"dns-operator-744455d44c-dl4br\" (UID: \"79387310-5596-4b5f-af33-0f6b8a9a40ff\") " pod="openshift-dns-operator/dns-operator-744455d44c-dl4br"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831604 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4357f10f-dad3-4233-9d03-1cad6319e4a9-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831659 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cce1e199-e6e7-41e1-b1b6-83f8d435c80e-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-k7lgs\" (UID: \"cce1e199-e6e7-41e1-b1b6-83f8d435c80e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831697 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bqg2\" (UniqueName: \"kubernetes.io/projected/cce1e199-e6e7-41e1-b1b6-83f8d435c80e-kube-api-access-5bqg2\") pod \"cluster-image-registry-operator-dc59b4c8b-k7lgs\" (UID: \"cce1e199-e6e7-41e1-b1b6-83f8d435c80e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831770 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831826 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831876 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4357f10f-dad3-4233-9d03-1cad6319e4a9-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831907 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831942 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cce1e199-e6e7-41e1-b1b6-83f8d435c80e-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-k7lgs\" (UID: \"cce1e199-e6e7-41e1-b1b6-83f8d435c80e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.831987 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wlfq\" (UniqueName: \"kubernetes.io/projected/79387310-5596-4b5f-af33-0f6b8a9a40ff-kube-api-access-4wlfq\") pod \"dns-operator-744455d44c-dl4br\" (UID: \"79387310-5596-4b5f-af33-0f6b8a9a40ff\") " pod="openshift-dns-operator/dns-operator-744455d44c-dl4br"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832098 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: E0929 21:27:41.832118 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:42.332104589 +0000 UTC m=+140.309217260 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832144 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-audit-policies\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832246 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832276 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/cce1e199-e6e7-41e1-b1b6-83f8d435c80e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-k7lgs\" (UID: \"cce1e199-e6e7-41e1-b1b6-83f8d435c80e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832326 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832361 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4357f10f-dad3-4233-9d03-1cad6319e4a9-trusted-ca\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832518 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832556 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46w52\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-kube-api-access-46w52\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832573 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832594 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-ms4bf\" (UID: \"a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832620 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832712 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.832780 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9a3253d3-c916-477b-82cd-7f7911bfc1b0-audit-dir\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.838331 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.858371 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.878229 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.898730 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.918224 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.933848 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:41 crc kubenswrapper[4911]: E0929 21:27:41.934040 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:42.43401681 +0000 UTC m=+140.411129481 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934116 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkw58\" (UniqueName: \"kubernetes.io/projected/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-kube-api-access-bkw58\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934176 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f9ff74-e395-4c2a-b467-c248ab4ae4bb-config\") pod \"kube-controller-manager-operator-78b949d7b-s62fl\" (UID: \"49f9ff74-e395-4c2a-b467-c248ab4ae4bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934204 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9da83d12-76d2-4194-a344-a1453b536a27-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-zqthg\" (UID: \"9da83d12-76d2-4194-a344-a1453b536a27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934255 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpvlm\" (UniqueName: \"kubernetes.io/projected/8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9-kube-api-access-bpvlm\") pod \"machine-config-operator-74547568cd-nh5bz\" (UID: \"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934303 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3b6e2fa8-d098-4250-b417-c577a01c5975-profile-collector-cert\") pod \"catalog-operator-68c6474976-dn2nd\" (UID: \"3b6e2fa8-d098-4250-b417-c577a01c5975\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934328 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3636d93a-cce9-4086-9cdc-b907988e1ff8-serving-cert\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934425 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9ff35d74-5cd5-4d25-bb50-8302240285b1-apiservice-cert\") pod \"packageserver-d55dfcdfc-pjbd8\" (UID: \"9ff35d74-5cd5-4d25-bb50-8302240285b1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934495 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3eae054-c8ac-4d16-a058-8a724a8a6d0f-config\") pod \"kube-apiserver-operator-766d6c64bb-mmgf6\" (UID: \"d3eae054-c8ac-4d16-a058-8a724a8a6d0f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934531 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr22l\" (UniqueName: \"kubernetes.io/projected/cd47cdce-41f4-416c-9436-4c386c50eb9e-kube-api-access-vr22l\") pod \"route-controller-manager-6576b87f9c-2mk6j\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934586 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf22b\" (UniqueName: \"kubernetes.io/projected/5d7dea83-ad4f-4f99-a281-1dcec6929b25-kube-api-access-tf22b\") pod \"dns-default-2pbvr\" (UID: \"5d7dea83-ad4f-4f99-a281-1dcec6929b25\") " pod="openshift-dns/dns-default-2pbvr"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934620 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rppr2\" (UniqueName: \"kubernetes.io/projected/db0c906b-7d60-4048-ad1d-e9765282348f-kube-api-access-rppr2\") pod \"openshift-apiserver-operator-796bbdcf4f-cp2jw\" (UID: \"db0c906b-7d60-4048-ad1d-e9765282348f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934646 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/72e24945-9968-4fe1-acd3-84d4c0e6099f-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8gplk\" (UID: \"72e24945-9968-4fe1-acd3-84d4c0e6099f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934673 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/639123e9-fc56-4554-a59a-0d2c3866c340-bound-sa-token\") pod \"ingress-operator-5b745b69d9-5f6q5\" (UID: \"639123e9-fc56-4554-a59a-0d2c3866c340\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934737 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5lbq\" (UniqueName: \"kubernetes.io/projected/7362e2ef-cab6-4891-bb59-a7969b6e72b6-kube-api-access-b5lbq\") pod \"downloads-7954f5f757-mrbmd\" (UID: \"7362e2ef-cab6-4891-bb59-a7969b6e72b6\") " pod="openshift-console/downloads-7954f5f757-mrbmd"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934899 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7232476-6ca7-47a3-8a5d-2d0e26afebdc-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-5bjvj\" (UID: \"a7232476-6ca7-47a3-8a5d-2d0e26afebdc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.934966 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-client\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935006 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-registration-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935024 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-plugins-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935101 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-ca\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935213 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-config\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935256 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9-proxy-tls\") pod \"machine-config-operator-74547568cd-nh5bz\" (UID: \"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935289 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/639123e9-fc56-4554-a59a-0d2c3866c340-trusted-ca\") pod \"ingress-operator-5b745b69d9-5f6q5\" (UID: \"639123e9-fc56-4554-a59a-0d2c3866c340\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935349 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-registry-tls\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935369 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935393 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2zzr\" (UniqueName: \"kubernetes.io/projected/3636d93a-cce9-4086-9cdc-b907988e1ff8-kube-api-access-g2zzr\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935412 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nn8m\" (UniqueName: \"kubernetes.io/projected/9ff35d74-5cd5-4d25-bb50-8302240285b1-kube-api-access-8nn8m\") pod \"packageserver-d55dfcdfc-pjbd8\" (UID: \"9ff35d74-5cd5-4d25-bb50-8302240285b1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935429 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b34b90b-583c-444d-921c-0d5fa13835d6-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935460 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4357f10f-dad3-4233-9d03-1cad6319e4a9-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935480 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd47cdce-41f4-416c-9436-4c386c50eb9e-config\") pod \"route-controller-manager-6576b87f9c-2mk6j\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935504 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4xks\" (UniqueName: \"kubernetes.io/projected/03fb6356-2be5-4caa-b804-5b44d2b52c61-kube-api-access-c4xks\") pod \"service-ca-9c57cc56f-dxnj8\" (UID: \"03fb6356-2be5-4caa-b804-5b44d2b52c61\") " pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935533 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/03fb6356-2be5-4caa-b804-5b44d2b52c61-signing-cabundle\") pod \"service-ca-9c57cc56f-dxnj8\" (UID: \"03fb6356-2be5-4caa-b804-5b44d2b52c61\") " pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.936065 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4357f10f-dad3-4233-9d03-1cad6319e4a9-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.935594 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5q5w\" (UniqueName: \"kubernetes.io/projected/11c18a96-1e89-47a1-bae8-7184c53da82f-kube-api-access-k5q5w\") pod \"ingress-canary-cqtn6\" (UID: \"11c18a96-1e89-47a1-bae8-7184c53da82f\") " pod="openshift-ingress-canary/ingress-canary-cqtn6"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.936172 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chcgd\" (UniqueName: \"kubernetes.io/projected/a3def338-7395-4682-81be-d3e671abf2ed-kube-api-access-chcgd\") pod \"machine-config-controller-84d6567774-6rs7d\" (UID: \"a3def338-7395-4682-81be-d3e671abf2ed\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.936198 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.936229 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.936284 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.936800 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db0c906b-7d60-4048-ad1d-e9765282348f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-cp2jw\" (UID: \"db0c906b-7d60-4048-ad1d-e9765282348f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.936883 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f47px\" (UniqueName: \"kubernetes.io/projected/3b6e2fa8-d098-4250-b417-c577a01c5975-kube-api-access-f47px\") pod \"catalog-operator-68c6474976-dn2nd\" (UID: \"3b6e2fa8-d098-4250-b417-c577a01c5975\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.936911 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-secret-volume\") pod \"collect-profiles-29319675-qdtss\" (UID: \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.936953 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3eae054-c8ac-4d16-a058-8a724a8a6d0f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-mmgf6\" (UID: \"d3eae054-c8ac-4d16-a058-8a724a8a6d0f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.937015 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b34b90b-583c-444d-921c-0d5fa13835d6-service-ca-bundle\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.937053 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d7dea83-ad4f-4f99-a281-1dcec6929b25-config-volume\") pod \"dns-default-2pbvr\" (UID: \"5d7dea83-ad4f-4f99-a281-1dcec6929b25\") " pod="openshift-dns/dns-default-2pbvr"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.937074 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5d7dea83-ad4f-4f99-a281-1dcec6929b25-metrics-tls\") pod \"dns-default-2pbvr\" (UID: \"5d7dea83-ad4f-4f99-a281-1dcec6929b25\") " pod="openshift-dns/dns-default-2pbvr"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.937105 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-audit-policies\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.937124 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9-images\") pod \"machine-config-operator-74547568cd-nh5bz\" (UID: \"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.937147 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bb0f6f96-6c75-46c0-9a5e-78310761816e-certs\") pod \"machine-config-server-q2dkc\" (UID: \"bb0f6f96-6c75-46c0-9a5e-78310761816e\") " pod="openshift-machine-config-operator/machine-config-server-q2dkc"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.937190 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4679p\" (UniqueName: \"kubernetes.io/projected/bead4097-1138-4381-9884-93bbf059b717-kube-api-access-4679p\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.937626 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdhfg\" (UniqueName: \"kubernetes.io/projected/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-kube-api-access-kdhfg\") pod \"collect-profiles-29319675-qdtss\" (UID: \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.937817 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9da83d12-76d2-4194-a344-a1453b536a27-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-zqthg\" (UID: \"9da83d12-76d2-4194-a344-a1453b536a27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938073 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/cce1e199-e6e7-41e1-b1b6-83f8d435c80e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-k7lgs\" (UID: \"cce1e199-e6e7-41e1-b1b6-83f8d435c80e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938128 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938170 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/03fb6356-2be5-4caa-b804-5b44d2b52c61-signing-key\") pod \"service-ca-9c57cc56f-dxnj8\" (UID: \"03fb6356-2be5-4caa-b804-5b44d2b52c61\") " pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938213 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkvzp\" (UniqueName: \"kubernetes.io/projected/21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af-kube-api-access-qkvzp\") pod \"multus-admission-controller-857f4d67dd-nwjdw\" (UID: \"21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nwjdw"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938171 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-audit-policies\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938325 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4357f10f-dad3-4233-9d03-1cad6319e4a9-trusted-ca\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938377 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938415 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b34b90b-583c-444d-921c-0d5fa13835d6-config\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938449 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11c18a96-1e89-47a1-bae8-7184c53da82f-cert\") pod \"ingress-canary-cqtn6\" (UID: \"11c18a96-1e89-47a1-bae8-7184c53da82f\") " pod="openshift-ingress-canary/ingress-canary-cqtn6"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938471 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/9ff35d74-5cd5-4d25-bb50-8302240285b1-tmpfs\") pod \"packageserver-d55dfcdfc-pjbd8\" (UID: \"9ff35d74-5cd5-4d25-bb50-8302240285b1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938497 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938520 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-nwjdw\" (UID: \"21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nwjdw"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938834 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmc2r\" (UniqueName: \"kubernetes.io/projected/3b34b90b-583c-444d-921c-0d5fa13835d6-kube-api-access-dmc2r\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938874 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938906 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwjfh\" (UniqueName: \"kubernetes.io/projected/a7232476-6ca7-47a3-8a5d-2d0e26afebdc-kube-api-access-nwjfh\") pod \"openshift-controller-manager-operator-756b6f6bc6-5bjvj\" (UID: \"a7232476-6ca7-47a3-8a5d-2d0e26afebdc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938924 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9a3253d3-c916-477b-82cd-7f7911bfc1b0-audit-dir\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938944 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b34b90b-583c-444d-921c-0d5fa13835d6-serving-cert\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938967 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/bead4097-1138-4381-9884-93bbf059b717-default-certificate\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.938995 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3eae054-c8ac-4d16-a058-8a724a8a6d0f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-mmgf6\" (UID: \"d3eae054-c8ac-4d16-a058-8a724a8a6d0f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939024 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3db165e-e40f-4731-9bc8-518b8bf79f14-serving-cert\") pod \"service-ca-operator-777779d784-wslbb\" (UID: \"d3db165e-e40f-4731-9bc8-518b8bf79f14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939048 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-mountpoint-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939252 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nj55\" (UniqueName: \"kubernetes.io/projected/d4fc0a96-1ce0-4e5d-a743-3256db5295bb-kube-api-access-2nj55\") pod \"migrator-59844c95c7-rlm59\" (UID: \"d4fc0a96-1ce0-4e5d-a743-3256db5295bb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rlm59"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939273 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-registry-tls\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939283 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqwwf\" (UniqueName: \"kubernetes.io/projected/9da83d12-76d2-4194-a344-a1453b536a27-kube-api-access-vqwwf\") pod \"kube-storage-version-migrator-operator-b67b599dd-zqthg\" (UID: \"9da83d12-76d2-4194-a344-a1453b536a27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939389 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x5rk\" (UniqueName: \"kubernetes.io/projected/9323b64c-27c9-45cc-92bf-520f640e5126-kube-api-access-9x5rk\") pod \"package-server-manager-789f6589d5-2gvvc\" (UID: \"9323b64c-27c9-45cc-92bf-520f640e5126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939423 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/639123e9-fc56-4554-a59a-0d2c3866c340-metrics-tls\") pod \"ingress-operator-5b745b69d9-5f6q5\" (UID: \"639123e9-fc56-4554-a59a-0d2c3866c340\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939464 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/615ffb8b-fb38-488c-b326-df6086017073-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s8dhg\" (UID: \"615ffb8b-fb38-488c-b326-df6086017073\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939501 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd47cdce-41f4-416c-9436-4c386c50eb9e-serving-cert\") pod \"route-controller-manager-6576b87f9c-2mk6j\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939531 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/49f9ff74-e395-4c2a-b467-c248ab4ae4bb-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-s62fl\" (UID: \"49f9ff74-e395-4c2a-b467-c248ab4ae4bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939562 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-csi-data-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939603 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939637 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbjl5\" (UniqueName: \"kubernetes.io/projected/9a3253d3-c916-477b-82cd-7f7911bfc1b0-kube-api-access-sbjl5\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939143 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9a3253d3-c916-477b-82cd-7f7911bfc1b0-audit-dir\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939676 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w9x4\" (UniqueName: \"kubernetes.io/projected/36aba054-4229-40fb-8fd4-344cd9f61a40-kube-api-access-6w9x4\") pod \"control-plane-machine-set-operator-78cbb6b69f-bgnq2\" (UID: \"36aba054-4229-40fb-8fd4-344cd9f61a40\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939734 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bead4097-1138-4381-9884-93bbf059b717-metrics-certs\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939781 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-bound-sa-token\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.939855 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/79387310-5596-4b5f-af33-0f6b8a9a40ff-metrics-tls\") pod \"dns-operator-744455d44c-dl4br\" (UID: \"79387310-5596-4b5f-af33-0f6b8a9a40ff\") " pod="openshift-dns-operator/dns-operator-744455d44c-dl4br"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940015 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4357f10f-dad3-4233-9d03-1cad6319e4a9-registry-certificates\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940067 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlb4z\" (UniqueName: \"kubernetes.io/projected/a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8-kube-api-access-nlb4z\") pod \"cluster-samples-operator-665b6dd947-ms4bf\" (UID: \"a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940118 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cce1e199-e6e7-41e1-b1b6-83f8d435c80e-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-k7lgs\" (UID: \"cce1e199-e6e7-41e1-b1b6-83f8d435c80e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940162 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md58v\" (UniqueName: \"kubernetes.io/projected/d3db165e-e40f-4731-9bc8-518b8bf79f14-kube-api-access-md58v\") pod \"service-ca-operator-777779d784-wslbb\" (UID: \"d3db165e-e40f-4731-9bc8-518b8bf79f14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940204 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/72e24945-9968-4fe1-acd3-84d4c0e6099f-srv-cert\") pod \"olm-operator-6b444d44fb-8gplk\" (UID: \"72e24945-9968-4fe1-acd3-84d4c0e6099f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940242 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nh5bz\" (UID: \"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940321 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/bead4097-1138-4381-9884-93bbf059b717-stats-auth\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940372 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-service-ca\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940603 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940619 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3db165e-e40f-4731-9bc8-518b8bf79f14-config\") pod \"service-ca-operator-777779d784-wslbb\" (UID: \"d3db165e-e40f-4731-9bc8-518b8bf79f14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940693 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bqg2\" (UniqueName: \"kubernetes.io/projected/cce1e199-e6e7-41e1-b1b6-83f8d435c80e-kube-api-access-5bqg2\") pod \"cluster-image-registry-operator-dc59b4c8b-k7lgs\" (UID: \"cce1e199-e6e7-41e1-b1b6-83f8d435c80e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940777 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940861 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3b6e2fa8-d098-4250-b417-c577a01c5975-srv-cert\") pod \"catalog-operator-68c6474976-dn2nd\" (UID: \"3b6e2fa8-d098-4250-b417-c577a01c5975\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940915 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db0c906b-7d60-4048-ad1d-e9765282348f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-cp2jw\" (UID: \"db0c906b-7d60-4048-ad1d-e9765282348f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.940988 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/36aba054-4229-40fb-8fd4-344cd9f61a40-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bgnq2\" (UID: \"36aba054-4229-40fb-8fd4-344cd9f61a40\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.941440 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: E0929 21:27:41.941859 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:42.441847367 +0000 UTC m=+140.418960038 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942333 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96xkl\" (UniqueName: \"kubernetes.io/projected/b6d3166a-4bc1-416c-b6f4-68207a1b0ccb-kube-api-access-96xkl\") pod \"machine-approver-56656f9798-kxz4b\" (UID: \"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942405 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4357f10f-dad3-4233-9d03-1cad6319e4a9-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942467 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfd6l\" (UniqueName: \"kubernetes.io/projected/bb0f6f96-6c75-46c0-9a5e-78310761816e-kube-api-access-nfd6l\") pod \"machine-config-server-q2dkc\" (UID: \"bb0f6f96-6c75-46c0-9a5e-78310761816e\") " pod="openshift-machine-config-operator/machine-config-server-q2dkc"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942498 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/b6d3166a-4bc1-416c-b6f4-68207a1b0ccb-machine-approver-tls\") pod \"machine-approver-56656f9798-kxz4b\" (UID: \"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942531 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wlfq\" (UniqueName: \"kubernetes.io/projected/79387310-5596-4b5f-af33-0f6b8a9a40ff-kube-api-access-4wlfq\") pod \"dns-operator-744455d44c-dl4br\" (UID: \"79387310-5596-4b5f-af33-0f6b8a9a40ff\") " pod="openshift-dns-operator/dns-operator-744455d44c-dl4br"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942587 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/615ffb8b-fb38-488c-b326-df6086017073-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s8dhg\" (UID: \"615ffb8b-fb38-488c-b326-df6086017073\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942617 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mqw9\" (UniqueName: \"kubernetes.io/projected/72e24945-9968-4fe1-acd3-84d4c0e6099f-kube-api-access-5mqw9\") pod \"olm-operator-6b444d44fb-8gplk\" (UID: \"72e24945-9968-4fe1-acd3-84d4c0e6099f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942669 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cce1e199-e6e7-41e1-b1b6-83f8d435c80e-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-k7lgs\" (UID: \"cce1e199-e6e7-41e1-b1b6-83f8d435c80e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942702 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49f9ff74-e395-4c2a-b467-c248ab4ae4bb-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-s62fl\" (UID: \"49f9ff74-e395-4c2a-b467-c248ab4ae4bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942736 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b6d3166a-4bc1-416c-b6f4-68207a1b0ccb-auth-proxy-config\") pod \"machine-approver-56656f9798-kxz4b\" (UID: \"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942763 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bead4097-1138-4381-9884-93bbf059b717-service-ca-bundle\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942814 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942824 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-socket-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942862 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942892 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5xkh\" (UniqueName: \"kubernetes.io/projected/639123e9-fc56-4554-a59a-0d2c3866c340-kube-api-access-w5xkh\") pod \"ingress-operator-5b745b69d9-5f6q5\" (UID: \"639123e9-fc56-4554-a59a-0d2c3866c340\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942921 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.942972 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a3def338-7395-4682-81be-d3e671abf2ed-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6rs7d\" (UID: \"a3def338-7395-4682-81be-d3e671abf2ed\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.943008 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9323b64c-27c9-45cc-92bf-520f640e5126-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2gvvc\" (UID: \"9323b64c-27c9-45cc-92bf-520f640e5126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.943736 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a3def338-7395-4682-81be-d3e671abf2ed-proxy-tls\") pod \"machine-config-controller-84d6567774-6rs7d\" (UID: \"a3def338-7395-4682-81be-d3e671abf2ed\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.943805 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.943836 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7232476-6ca7-47a3-8a5d-2d0e26afebdc-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-5bjvj\" (UID:
\"a7232476-6ca7-47a3-8a5d-2d0e26afebdc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.943835 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.943860 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9ff35d74-5cd5-4d25-bb50-8302240285b1-webhook-cert\") pod \"packageserver-d55dfcdfc-pjbd8\" (UID: \"9ff35d74-5cd5-4d25-bb50-8302240285b1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.943891 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bb0f6f96-6c75-46c0-9a5e-78310761816e-node-bootstrap-token\") pod \"machine-config-server-q2dkc\" (UID: \"bb0f6f96-6c75-46c0-9a5e-78310761816e\") " pod="openshift-machine-config-operator/machine-config-server-q2dkc" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.943916 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-config-volume\") pod \"collect-profiles-29319675-qdtss\" (UID: \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.943943 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d3166a-4bc1-416c-b6f4-68207a1b0ccb-config\") pod \"machine-approver-56656f9798-kxz4b\" (UID: \"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.943984 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd47cdce-41f4-416c-9436-4c386c50eb9e-client-ca\") pod \"route-controller-manager-6576b87f9c-2mk6j\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.944009 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg5w7\" (UniqueName: \"kubernetes.io/projected/615ffb8b-fb38-488c-b326-df6086017073-kube-api-access-xg5w7\") pod \"marketplace-operator-79b997595-s8dhg\" (UID: \"615ffb8b-fb38-488c-b326-df6086017073\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.944048 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46w52\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-kube-api-access-46w52\") pod \"image-registry-697d97f7c8-8djg4\" (UID: 
\"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.944075 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.944101 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-ms4bf\" (UID: \"a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.944470 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cce1e199-e6e7-41e1-b1b6-83f8d435c80e-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-k7lgs\" (UID: \"cce1e199-e6e7-41e1-b1b6-83f8d435c80e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.944706 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.945110 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/79387310-5596-4b5f-af33-0f6b8a9a40ff-metrics-tls\") pod \"dns-operator-744455d44c-dl4br\" (UID: \"79387310-5596-4b5f-af33-0f6b8a9a40ff\") " pod="openshift-dns-operator/dns-operator-744455d44c-dl4br" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.945420 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4357f10f-dad3-4233-9d03-1cad6319e4a9-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.945716 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.946127 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4357f10f-dad3-4233-9d03-1cad6319e4a9-trusted-ca\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.946569 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4357f10f-dad3-4233-9d03-1cad6319e4a9-registry-certificates\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.946975 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.947186 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.947470 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/cce1e199-e6e7-41e1-b1b6-83f8d435c80e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-k7lgs\" (UID: \"cce1e199-e6e7-41e1-b1b6-83f8d435c80e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.949713 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-ms4bf\" (UID: \"a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.951602 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.959116 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.961101 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.979414 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Sep 29 21:27:41 crc kubenswrapper[4911]: I0929 21:27:41.997755 4911 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.019510 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.045694 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.045997 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-client\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046037 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/639123e9-fc56-4554-a59a-0d2c3866c340-bound-sa-token\") pod \"ingress-operator-5b745b69d9-5f6q5\" (UID: \"639123e9-fc56-4554-a59a-0d2c3866c340\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5" Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.046081 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:42.546036648 +0000 UTC m=+140.523149459 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046163 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5lbq\" (UniqueName: \"kubernetes.io/projected/7362e2ef-cab6-4891-bb59-a7969b6e72b6-kube-api-access-b5lbq\") pod \"downloads-7954f5f757-mrbmd\" (UID: \"7362e2ef-cab6-4891-bb59-a7969b6e72b6\") " pod="openshift-console/downloads-7954f5f757-mrbmd" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046234 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7232476-6ca7-47a3-8a5d-2d0e26afebdc-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-5bjvj\" (UID: \"a7232476-6ca7-47a3-8a5d-2d0e26afebdc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046281 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-registration-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046310 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-plugins-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046345 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-ca\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046382 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-config\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046423 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9-proxy-tls\") pod \"machine-config-operator-74547568cd-nh5bz\" (UID: \"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046448 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/639123e9-fc56-4554-a59a-0d2c3866c340-trusted-ca\") 
pod \"ingress-operator-5b745b69d9-5f6q5\" (UID: \"639123e9-fc56-4554-a59a-0d2c3866c340\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046481 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2zzr\" (UniqueName: \"kubernetes.io/projected/3636d93a-cce9-4086-9cdc-b907988e1ff8-kube-api-access-g2zzr\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046507 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4xks\" (UniqueName: \"kubernetes.io/projected/03fb6356-2be5-4caa-b804-5b44d2b52c61-kube-api-access-c4xks\") pod \"service-ca-9c57cc56f-dxnj8\" (UID: \"03fb6356-2be5-4caa-b804-5b44d2b52c61\") " pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046532 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nn8m\" (UniqueName: \"kubernetes.io/projected/9ff35d74-5cd5-4d25-bb50-8302240285b1-kube-api-access-8nn8m\") pod \"packageserver-d55dfcdfc-pjbd8\" (UID: \"9ff35d74-5cd5-4d25-bb50-8302240285b1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046558 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b34b90b-583c-444d-921c-0d5fa13835d6-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046596 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd47cdce-41f4-416c-9436-4c386c50eb9e-config\") pod \"route-controller-manager-6576b87f9c-2mk6j\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046624 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-registration-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046633 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-plugins-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046631 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/03fb6356-2be5-4caa-b804-5b44d2b52c61-signing-cabundle\") pod \"service-ca-9c57cc56f-dxnj8\" (UID: \"03fb6356-2be5-4caa-b804-5b44d2b52c61\") " pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046704 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-k5q5w\" (UniqueName: \"kubernetes.io/projected/11c18a96-1e89-47a1-bae8-7184c53da82f-kube-api-access-k5q5w\") pod \"ingress-canary-cqtn6\" (UID: \"11c18a96-1e89-47a1-bae8-7184c53da82f\") " pod="openshift-ingress-canary/ingress-canary-cqtn6" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046755 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chcgd\" (UniqueName: \"kubernetes.io/projected/a3def338-7395-4682-81be-d3e671abf2ed-kube-api-access-chcgd\") pod \"machine-config-controller-84d6567774-6rs7d\" (UID: \"a3def338-7395-4682-81be-d3e671abf2ed\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046811 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db0c906b-7d60-4048-ad1d-e9765282348f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-cp2jw\" (UID: \"db0c906b-7d60-4048-ad1d-e9765282348f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046844 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f47px\" (UniqueName: \"kubernetes.io/projected/3b6e2fa8-d098-4250-b417-c577a01c5975-kube-api-access-f47px\") pod \"catalog-operator-68c6474976-dn2nd\" (UID: \"3b6e2fa8-d098-4250-b417-c577a01c5975\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046872 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-secret-volume\") pod \"collect-profiles-29319675-qdtss\" (UID: \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046920 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3eae054-c8ac-4d16-a058-8a724a8a6d0f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-mmgf6\" (UID: \"d3eae054-c8ac-4d16-a058-8a724a8a6d0f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046955 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d7dea83-ad4f-4f99-a281-1dcec6929b25-config-volume\") pod \"dns-default-2pbvr\" (UID: \"5d7dea83-ad4f-4f99-a281-1dcec6929b25\") " pod="openshift-dns/dns-default-2pbvr" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.046986 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b34b90b-583c-444d-921c-0d5fa13835d6-service-ca-bundle\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047016 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9-images\") pod 
\"machine-config-operator-74547568cd-nh5bz\" (UID: \"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047044 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5d7dea83-ad4f-4f99-a281-1dcec6929b25-metrics-tls\") pod \"dns-default-2pbvr\" (UID: \"5d7dea83-ad4f-4f99-a281-1dcec6929b25\") " pod="openshift-dns/dns-default-2pbvr" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047083 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bb0f6f96-6c75-46c0-9a5e-78310761816e-certs\") pod \"machine-config-server-q2dkc\" (UID: \"bb0f6f96-6c75-46c0-9a5e-78310761816e\") " pod="openshift-machine-config-operator/machine-config-server-q2dkc" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047114 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdhfg\" (UniqueName: \"kubernetes.io/projected/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-kube-api-access-kdhfg\") pod \"collect-profiles-29319675-qdtss\" (UID: \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047143 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4679p\" (UniqueName: \"kubernetes.io/projected/bead4097-1138-4381-9884-93bbf059b717-kube-api-access-4679p\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047175 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9da83d12-76d2-4194-a344-a1453b536a27-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-zqthg\" (UID: \"9da83d12-76d2-4194-a344-a1453b536a27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047207 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/03fb6356-2be5-4caa-b804-5b44d2b52c61-signing-key\") pod \"service-ca-9c57cc56f-dxnj8\" (UID: \"03fb6356-2be5-4caa-b804-5b44d2b52c61\") " pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047239 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkvzp\" (UniqueName: \"kubernetes.io/projected/21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af-kube-api-access-qkvzp\") pod \"multus-admission-controller-857f4d67dd-nwjdw\" (UID: \"21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nwjdw" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047275 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b34b90b-583c-444d-921c-0d5fa13835d6-config\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 
21:27:42.047303 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/9ff35d74-5cd5-4d25-bb50-8302240285b1-tmpfs\") pod \"packageserver-d55dfcdfc-pjbd8\" (UID: \"9ff35d74-5cd5-4d25-bb50-8302240285b1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047329 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11c18a96-1e89-47a1-bae8-7184c53da82f-cert\") pod \"ingress-canary-cqtn6\" (UID: \"11c18a96-1e89-47a1-bae8-7184c53da82f\") " pod="openshift-ingress-canary/ingress-canary-cqtn6" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047357 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-nwjdw\" (UID: \"21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nwjdw" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047381 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwjfh\" (UniqueName: \"kubernetes.io/projected/a7232476-6ca7-47a3-8a5d-2d0e26afebdc-kube-api-access-nwjfh\") pod \"openshift-controller-manager-operator-756b6f6bc6-5bjvj\" (UID: \"a7232476-6ca7-47a3-8a5d-2d0e26afebdc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047405 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmc2r\" (UniqueName: \"kubernetes.io/projected/3b34b90b-583c-444d-921c-0d5fa13835d6-kube-api-access-dmc2r\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047431 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b34b90b-583c-444d-921c-0d5fa13835d6-serving-cert\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047457 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-mountpoint-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047479 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/bead4097-1138-4381-9884-93bbf059b717-default-certificate\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047504 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/d3eae054-c8ac-4d16-a058-8a724a8a6d0f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-mmgf6\" (UID: \"d3eae054-c8ac-4d16-a058-8a724a8a6d0f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047529 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3db165e-e40f-4731-9bc8-518b8bf79f14-serving-cert\") pod \"service-ca-operator-777779d784-wslbb\" (UID: \"d3db165e-e40f-4731-9bc8-518b8bf79f14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047568 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nj55\" (UniqueName: \"kubernetes.io/projected/d4fc0a96-1ce0-4e5d-a743-3256db5295bb-kube-api-access-2nj55\") pod \"migrator-59844c95c7-rlm59\" (UID: \"d4fc0a96-1ce0-4e5d-a743-3256db5295bb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rlm59" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047590 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqwwf\" (UniqueName: \"kubernetes.io/projected/9da83d12-76d2-4194-a344-a1453b536a27-kube-api-access-vqwwf\") pod \"kube-storage-version-migrator-operator-b67b599dd-zqthg\" (UID: \"9da83d12-76d2-4194-a344-a1453b536a27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047625 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x5rk\" (UniqueName: \"kubernetes.io/projected/9323b64c-27c9-45cc-92bf-520f640e5126-kube-api-access-9x5rk\") pod \"package-server-manager-789f6589d5-2gvvc\" (UID: \"9323b64c-27c9-45cc-92bf-520f640e5126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047646 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/639123e9-fc56-4554-a59a-0d2c3866c340-metrics-tls\") pod \"ingress-operator-5b745b69d9-5f6q5\" (UID: \"639123e9-fc56-4554-a59a-0d2c3866c340\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047678 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/615ffb8b-fb38-488c-b326-df6086017073-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s8dhg\" (UID: \"615ffb8b-fb38-488c-b326-df6086017073\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047699 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd47cdce-41f4-416c-9436-4c386c50eb9e-serving-cert\") pod \"route-controller-manager-6576b87f9c-2mk6j\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047721 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/49f9ff74-e395-4c2a-b467-c248ab4ae4bb-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-s62fl\" (UID: \"49f9ff74-e395-4c2a-b467-c248ab4ae4bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047757 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-csi-data-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047813 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w9x4\" (UniqueName: \"kubernetes.io/projected/36aba054-4229-40fb-8fd4-344cd9f61a40-kube-api-access-6w9x4\") pod \"control-plane-machine-set-operator-78cbb6b69f-bgnq2\" (UID: \"36aba054-4229-40fb-8fd4-344cd9f61a40\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047838 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bead4097-1138-4381-9884-93bbf059b717-metrics-certs\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047897 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md58v\" (UniqueName: \"kubernetes.io/projected/d3db165e-e40f-4731-9bc8-518b8bf79f14-kube-api-access-md58v\") pod \"service-ca-operator-777779d784-wslbb\" (UID: \"d3db165e-e40f-4731-9bc8-518b8bf79f14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047925 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/72e24945-9968-4fe1-acd3-84d4c0e6099f-srv-cert\") pod \"olm-operator-6b444d44fb-8gplk\" (UID: \"72e24945-9968-4fe1-acd3-84d4c0e6099f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047958 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nh5bz\" (UID: \"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.047989 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/bead4097-1138-4381-9884-93bbf059b717-stats-auth\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048024 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-service-ca\") pod \"etcd-operator-b45778765-j9nwx\" (UID: 
\"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048055 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3db165e-e40f-4731-9bc8-518b8bf79f14-config\") pod \"service-ca-operator-777779d784-wslbb\" (UID: \"d3db165e-e40f-4731-9bc8-518b8bf79f14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048079 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-mountpoint-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048090 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048121 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3b6e2fa8-d098-4250-b417-c577a01c5975-srv-cert\") pod \"catalog-operator-68c6474976-dn2nd\" (UID: \"3b6e2fa8-d098-4250-b417-c577a01c5975\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048148 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db0c906b-7d60-4048-ad1d-e9765282348f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-cp2jw\" (UID: \"db0c906b-7d60-4048-ad1d-e9765282348f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048177 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/36aba054-4229-40fb-8fd4-344cd9f61a40-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bgnq2\" (UID: \"36aba054-4229-40fb-8fd4-344cd9f61a40\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048208 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/b6d3166a-4bc1-416c-b6f4-68207a1b0ccb-machine-approver-tls\") pod \"machine-approver-56656f9798-kxz4b\" (UID: \"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048240 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96xkl\" (UniqueName: \"kubernetes.io/projected/b6d3166a-4bc1-416c-b6f4-68207a1b0ccb-kube-api-access-96xkl\") pod \"machine-approver-56656f9798-kxz4b\" (UID: \"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048274 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfd6l\" (UniqueName: \"kubernetes.io/projected/bb0f6f96-6c75-46c0-9a5e-78310761816e-kube-api-access-nfd6l\") pod \"machine-config-server-q2dkc\" (UID: \"bb0f6f96-6c75-46c0-9a5e-78310761816e\") " pod="openshift-machine-config-operator/machine-config-server-q2dkc" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048313 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/615ffb8b-fb38-488c-b326-df6086017073-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s8dhg\" (UID: \"615ffb8b-fb38-488c-b326-df6086017073\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048344 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mqw9\" (UniqueName: \"kubernetes.io/projected/72e24945-9968-4fe1-acd3-84d4c0e6099f-kube-api-access-5mqw9\") pod \"olm-operator-6b444d44fb-8gplk\" (UID: \"72e24945-9968-4fe1-acd3-84d4c0e6099f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048372 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49f9ff74-e395-4c2a-b467-c248ab4ae4bb-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-s62fl\" (UID: \"49f9ff74-e395-4c2a-b467-c248ab4ae4bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048369 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd47cdce-41f4-416c-9436-4c386c50eb9e-config\") pod \"route-controller-manager-6576b87f9c-2mk6j\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048433 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b34b90b-583c-444d-921c-0d5fa13835d6-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048658 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9da83d12-76d2-4194-a344-a1453b536a27-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-zqthg\" (UID: \"9da83d12-76d2-4194-a344-a1453b536a27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048893 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b34b90b-583c-444d-921c-0d5fa13835d6-config\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" Sep 29 
21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.048903 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:42.548850172 +0000 UTC m=+140.525962853 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.048399 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b6d3166a-4bc1-416c-b6f4-68207a1b0ccb-auth-proxy-config\") pod \"machine-approver-56656f9798-kxz4b\" (UID: \"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049467 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-csi-data-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049500 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-socket-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049537 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bead4097-1138-4381-9884-93bbf059b717-service-ca-bundle\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049576 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5xkh\" (UniqueName: \"kubernetes.io/projected/639123e9-fc56-4554-a59a-0d2c3866c340-kube-api-access-w5xkh\") pod \"ingress-operator-5b745b69d9-5f6q5\" (UID: \"639123e9-fc56-4554-a59a-0d2c3866c340\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049608 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-socket-dir\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049639 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a3def338-7395-4682-81be-d3e671abf2ed-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6rs7d\" (UID: 
\"a3def338-7395-4682-81be-d3e671abf2ed\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049682 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9323b64c-27c9-45cc-92bf-520f640e5126-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2gvvc\" (UID: \"9323b64c-27c9-45cc-92bf-520f640e5126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049712 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a3def338-7395-4682-81be-d3e671abf2ed-proxy-tls\") pod \"machine-config-controller-84d6567774-6rs7d\" (UID: \"a3def338-7395-4682-81be-d3e671abf2ed\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049744 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7232476-6ca7-47a3-8a5d-2d0e26afebdc-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-5bjvj\" (UID: \"a7232476-6ca7-47a3-8a5d-2d0e26afebdc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049769 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9ff35d74-5cd5-4d25-bb50-8302240285b1-webhook-cert\") pod \"packageserver-d55dfcdfc-pjbd8\" (UID: \"9ff35d74-5cd5-4d25-bb50-8302240285b1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049816 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d3166a-4bc1-416c-b6f4-68207a1b0ccb-config\") pod \"machine-approver-56656f9798-kxz4b\" (UID: \"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049851 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bb0f6f96-6c75-46c0-9a5e-78310761816e-node-bootstrap-token\") pod \"machine-config-server-q2dkc\" (UID: \"bb0f6f96-6c75-46c0-9a5e-78310761816e\") " pod="openshift-machine-config-operator/machine-config-server-q2dkc" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049885 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-config-volume\") pod \"collect-profiles-29319675-qdtss\" (UID: \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.049926 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd47cdce-41f4-416c-9436-4c386c50eb9e-client-ca\") pod \"route-controller-manager-6576b87f9c-2mk6j\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050678 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7232476-6ca7-47a3-8a5d-2d0e26afebdc-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-5bjvj\" (UID: \"a7232476-6ca7-47a3-8a5d-2d0e26afebdc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050738 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg5w7\" (UniqueName: \"kubernetes.io/projected/615ffb8b-fb38-488c-b326-df6086017073-kube-api-access-xg5w7\") pod \"marketplace-operator-79b997595-s8dhg\" (UID: \"615ffb8b-fb38-488c-b326-df6086017073\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050777 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3636d93a-cce9-4086-9cdc-b907988e1ff8-serving-cert\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050811 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkw58\" (UniqueName: \"kubernetes.io/projected/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-kube-api-access-bkw58\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050833 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f9ff74-e395-4c2a-b467-c248ab4ae4bb-config\") pod \"kube-controller-manager-operator-78b949d7b-s62fl\" (UID: \"49f9ff74-e395-4c2a-b467-c248ab4ae4bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050853 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9da83d12-76d2-4194-a344-a1453b536a27-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-zqthg\" (UID: \"9da83d12-76d2-4194-a344-a1453b536a27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050873 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpvlm\" (UniqueName: \"kubernetes.io/projected/8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9-kube-api-access-bpvlm\") pod \"machine-config-operator-74547568cd-nh5bz\" (UID: \"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050904 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3b6e2fa8-d098-4250-b417-c577a01c5975-profile-collector-cert\") pod \"catalog-operator-68c6474976-dn2nd\" (UID: \"3b6e2fa8-d098-4250-b417-c577a01c5975\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050922 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9ff35d74-5cd5-4d25-bb50-8302240285b1-apiservice-cert\") pod \"packageserver-d55dfcdfc-pjbd8\" (UID: \"9ff35d74-5cd5-4d25-bb50-8302240285b1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050945 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3eae054-c8ac-4d16-a058-8a724a8a6d0f-config\") pod \"kube-apiserver-operator-766d6c64bb-mmgf6\" (UID: \"d3eae054-c8ac-4d16-a058-8a724a8a6d0f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050968 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf22b\" (UniqueName: \"kubernetes.io/projected/5d7dea83-ad4f-4f99-a281-1dcec6929b25-kube-api-access-tf22b\") pod \"dns-default-2pbvr\" (UID: \"5d7dea83-ad4f-4f99-a281-1dcec6929b25\") " pod="openshift-dns/dns-default-2pbvr" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050986 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd47cdce-41f4-416c-9436-4c386c50eb9e-client-ca\") pod \"route-controller-manager-6576b87f9c-2mk6j\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.050995 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr22l\" (UniqueName: \"kubernetes.io/projected/cd47cdce-41f4-416c-9436-4c386c50eb9e-kube-api-access-vr22l\") pod \"route-controller-manager-6576b87f9c-2mk6j\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.051014 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rppr2\" (UniqueName: \"kubernetes.io/projected/db0c906b-7d60-4048-ad1d-e9765282348f-kube-api-access-rppr2\") pod \"openshift-apiserver-operator-796bbdcf4f-cp2jw\" (UID: \"db0c906b-7d60-4048-ad1d-e9765282348f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.051037 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/72e24945-9968-4fe1-acd3-84d4c0e6099f-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8gplk\" (UID: \"72e24945-9968-4fe1-acd3-84d4c0e6099f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.051105 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b34b90b-583c-444d-921c-0d5fa13835d6-service-ca-bundle\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 
21:27:42.051310 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7232476-6ca7-47a3-8a5d-2d0e26afebdc-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-5bjvj\" (UID: \"a7232476-6ca7-47a3-8a5d-2d0e26afebdc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.051489 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d3166a-4bc1-416c-b6f4-68207a1b0ccb-config\") pod \"machine-approver-56656f9798-kxz4b\" (UID: \"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.051644 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b34b90b-583c-444d-921c-0d5fa13835d6-serving-cert\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.051842 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nh5bz\" (UID: \"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.052187 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a3def338-7395-4682-81be-d3e671abf2ed-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6rs7d\" (UID: \"a3def338-7395-4682-81be-d3e671abf2ed\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.053257 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/36aba054-4229-40fb-8fd4-344cd9f61a40-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bgnq2\" (UID: \"36aba054-4229-40fb-8fd4-344cd9f61a40\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.053737 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-nwjdw\" (UID: \"21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nwjdw" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.053840 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/615ffb8b-fb38-488c-b326-df6086017073-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s8dhg\" (UID: \"615ffb8b-fb38-488c-b326-df6086017073\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.054086 4911 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/615ffb8b-fb38-488c-b326-df6086017073-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s8dhg\" (UID: \"615ffb8b-fb38-488c-b326-df6086017073\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.055748 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd47cdce-41f4-416c-9436-4c386c50eb9e-serving-cert\") pod \"route-controller-manager-6576b87f9c-2mk6j\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.057134 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9da83d12-76d2-4194-a344-a1453b536a27-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-zqthg\" (UID: \"9da83d12-76d2-4194-a344-a1453b536a27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.057336 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/b6d3166a-4bc1-416c-b6f4-68207a1b0ccb-machine-approver-tls\") pod \"machine-approver-56656f9798-kxz4b\" (UID: \"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.058486 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.061332 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b6d3166a-4bc1-416c-b6f4-68207a1b0ccb-auth-proxy-config\") pod \"machine-approver-56656f9798-kxz4b\" (UID: \"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.061350 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/9ff35d74-5cd5-4d25-bb50-8302240285b1-tmpfs\") pod \"packageserver-d55dfcdfc-pjbd8\" (UID: \"9ff35d74-5cd5-4d25-bb50-8302240285b1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.078977 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.099546 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.105914 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/72e24945-9968-4fe1-acd3-84d4c0e6099f-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8gplk\" (UID: \"72e24945-9968-4fe1-acd3-84d4c0e6099f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.106750 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3b6e2fa8-d098-4250-b417-c577a01c5975-profile-collector-cert\") pod \"catalog-operator-68c6474976-dn2nd\" (UID: \"3b6e2fa8-d098-4250-b417-c577a01c5975\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.111164 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-secret-volume\") pod \"collect-profiles-29319675-qdtss\" (UID: \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.124332 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.133241 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3b6e2fa8-d098-4250-b417-c577a01c5975-srv-cert\") pod \"catalog-operator-68c6474976-dn2nd\" (UID: \"3b6e2fa8-d098-4250-b417-c577a01c5975\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.138712 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.152439 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.152612 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:42.652571959 +0000 UTC m=+140.629684670 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.153899 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.154382 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-09-29 21:27:42.654363854 +0000 UTC m=+140.631476525 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.158190 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.178969 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.188081 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db0c906b-7d60-4048-ad1d-e9765282348f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-cp2jw\" (UID: \"db0c906b-7d60-4048-ad1d-e9765282348f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.198854 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.219408 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.231573 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db0c906b-7d60-4048-ad1d-e9765282348f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-cp2jw\" (UID: \"db0c906b-7d60-4048-ad1d-e9765282348f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.238998 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.255772 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.256036 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:42.755997797 +0000 UTC m=+140.733110498 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.256672 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.257501 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:42.75743327 +0000 UTC m=+140.734545961 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.259092 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.279395 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.282104 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-config-volume\") pod \"collect-profiles-29319675-qdtss\" (UID: \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.299910 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.319257 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.339017 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.359082 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.359301 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.359567 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:42.859531518 +0000 UTC m=+140.836644189 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.361138 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.361941 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:42.8619103 +0000 UTC m=+140.839023081 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.372114 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/03fb6356-2be5-4caa-b804-5b44d2b52c61-signing-key\") pod \"service-ca-9c57cc56f-dxnj8\" (UID: \"03fb6356-2be5-4caa-b804-5b44d2b52c61\") " pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.379683 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.388762 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/03fb6356-2be5-4caa-b804-5b44d2b52c61-signing-cabundle\") pod \"service-ca-9c57cc56f-dxnj8\" (UID: \"03fb6356-2be5-4caa-b804-5b44d2b52c61\") " pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.398933 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.419007 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Sep 29 21:27:42 crc 
kubenswrapper[4911]: I0929 21:27:42.429933 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9-images\") pod \"machine-config-operator-74547568cd-nh5bz\" (UID: \"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.439313 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.460664 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.463100 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.463323 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:42.963293415 +0000 UTC m=+140.940406086 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.464221 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.465580 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:42.965535264 +0000 UTC m=+140.942647975 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.473132 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9-proxy-tls\") pod \"machine-config-operator-74547568cd-nh5bz\" (UID: \"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.478345 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.486260 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9ff35d74-5cd5-4d25-bb50-8302240285b1-webhook-cert\") pod \"packageserver-d55dfcdfc-pjbd8\" (UID: \"9ff35d74-5cd5-4d25-bb50-8302240285b1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.488601 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9ff35d74-5cd5-4d25-bb50-8302240285b1-apiservice-cert\") pod \"packageserver-d55dfcdfc-pjbd8\" (UID: \"9ff35d74-5cd5-4d25-bb50-8302240285b1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.501254 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.520155 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.535685 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49f9ff74-e395-4c2a-b467-c248ab4ae4bb-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-s62fl\" (UID: \"49f9ff74-e395-4c2a-b467-c248ab4ae4bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.538617 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.543315 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f9ff74-e395-4c2a-b467-c248ab4ae4bb-config\") pod \"kube-controller-manager-operator-78b949d7b-s62fl\" (UID: \"49f9ff74-e395-4c2a-b467-c248ab4ae4bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.557901 4911 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.566752 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.567139 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.067086894 +0000 UTC m=+141.044199605 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.567312 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.567902 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.067883938 +0000 UTC m=+141.044996659 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.579100 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.595174 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/639123e9-fc56-4554-a59a-0d2c3866c340-metrics-tls\") pod \"ingress-operator-5b745b69d9-5f6q5\" (UID: \"639123e9-fc56-4554-a59a-0d2c3866c340\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.596441 4911 request.go:700] Waited for 1.016050885s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-operator/secrets?fieldSelector=metadata.name%3Dingress-operator-dockercfg-7lnqk&limit=500&resourceVersion=0 Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.598074 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.627657 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.638850 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.640303 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/639123e9-fc56-4554-a59a-0d2c3866c340-trusted-ca\") pod \"ingress-operator-5b745b69d9-5f6q5\" (UID: \"639123e9-fc56-4554-a59a-0d2c3866c340\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.659199 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.670482 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.670714 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.170677657 +0000 UTC m=+141.147790328 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.671217 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.671947 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.171937714 +0000 UTC m=+141.149050385 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.679742 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.698876 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.712291 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3eae054-c8ac-4d16-a058-8a724a8a6d0f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-mmgf6\" (UID: \"d3eae054-c8ac-4d16-a058-8a724a8a6d0f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.719640 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.739236 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.742308 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3eae054-c8ac-4d16-a058-8a724a8a6d0f-config\") pod \"kube-apiserver-operator-766d6c64bb-mmgf6\" (UID: \"d3eae054-c8ac-4d16-a058-8a724a8a6d0f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.759516 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" 
Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.773940 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/72e24945-9968-4fe1-acd3-84d4c0e6099f-srv-cert\") pod \"olm-operator-6b444d44fb-8gplk\" (UID: \"72e24945-9968-4fe1-acd3-84d4c0e6099f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.774133 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.774298 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.274271129 +0000 UTC m=+141.251383800 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.776070 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.776865 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.276775034 +0000 UTC m=+141.253887735 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.782723 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.795744 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/bead4097-1138-4381-9884-93bbf059b717-stats-auth\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.799173 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.818448 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.820434 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bead4097-1138-4381-9884-93bbf059b717-service-ca-bundle\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.838777 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.854551 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/bead4097-1138-4381-9884-93bbf059b717-default-certificate\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.859199 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.865142 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bead4097-1138-4381-9884-93bbf059b717-metrics-certs\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.877383 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.877531 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.377510391 +0000 UTC m=+141.354623082 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.879420 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.879870 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.880254 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.380245254 +0000 UTC m=+141.357357925 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.898934 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.918955 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.924871 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a3def338-7395-4682-81be-d3e671abf2ed-proxy-tls\") pod \"machine-config-controller-84d6567774-6rs7d\" (UID: \"a3def338-7395-4682-81be-d3e671abf2ed\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.939290 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.958633 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.979869 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.980704 4911 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.980996 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.480962479 +0000 UTC m=+141.458075190 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.982230 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:42 crc kubenswrapper[4911]: E0929 21:27:42.982699 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.482688012 +0000 UTC m=+141.459800693 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:42 crc kubenswrapper[4911]: I0929 21:27:42.998653 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.000962 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3db165e-e40f-4731-9bc8-518b8bf79f14-config\") pod \"service-ca-operator-777779d784-wslbb\" (UID: \"d3db165e-e40f-4731-9bc8-518b8bf79f14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb" Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.018869 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.038559 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.046962 4911 secret.go:188] Couldn't get secret openshift-etcd-operator/etcd-client: failed to sync secret cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.046992 4911 configmap.go:193] Couldn't get configMap openshift-etcd-operator/etcd-operator-config: failed to sync configmap cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.047051 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-client podName:3636d93a-cce9-4086-9cdc-b907988e1ff8 nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.547021387 +0000 UTC m=+141.524134058 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-client" (UniqueName: "kubernetes.io/secret/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-client") pod "etcd-operator-b45778765-j9nwx" (UID: "3636d93a-cce9-4086-9cdc-b907988e1ff8") : failed to sync secret cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.047093 4911 configmap.go:193] Couldn't get configMap openshift-etcd-operator/etcd-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.047139 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-config podName:3636d93a-cce9-4086-9cdc-b907988e1ff8 nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.547096189 +0000 UTC m=+141.524208900 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-config") pod "etcd-operator-b45778765-j9nwx" (UID: "3636d93a-cce9-4086-9cdc-b907988e1ff8") : failed to sync configmap cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.047158 4911 configmap.go:193] Couldn't get configMap openshift-dns/dns-default: failed to sync configmap cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.047207 4911 secret.go:188] Couldn't get secret openshift-dns/dns-default-metrics-tls: failed to sync secret cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.047230 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-ca podName:3636d93a-cce9-4086-9cdc-b907988e1ff8 nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.547200882 +0000 UTC m=+141.524313753 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-ca" (UniqueName: "kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-ca") pod "etcd-operator-b45778765-j9nwx" (UID: "3636d93a-cce9-4086-9cdc-b907988e1ff8") : failed to sync configmap cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.047267 4911 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.047284 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5d7dea83-ad4f-4f99-a281-1dcec6929b25-config-volume podName:5d7dea83-ad4f-4f99-a281-1dcec6929b25 nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.547246664 +0000 UTC m=+141.524359365 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/5d7dea83-ad4f-4f99-a281-1dcec6929b25-config-volume") pod "dns-default-2pbvr" (UID: "5d7dea83-ad4f-4f99-a281-1dcec6929b25") : failed to sync configmap cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.047333 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5d7dea83-ad4f-4f99-a281-1dcec6929b25-metrics-tls podName:5d7dea83-ad4f-4f99-a281-1dcec6929b25 nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.547316136 +0000 UTC m=+141.524428977 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/5d7dea83-ad4f-4f99-a281-1dcec6929b25-metrics-tls") pod "dns-default-2pbvr" (UID: "5d7dea83-ad4f-4f99-a281-1dcec6929b25") : failed to sync secret cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.047786 4911 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.047962 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/11c18a96-1e89-47a1-bae8-7184c53da82f-cert podName:11c18a96-1e89-47a1-bae8-7184c53da82f nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.547926684 +0000 UTC m=+141.525039445 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/11c18a96-1e89-47a1-bae8-7184c53da82f-cert") pod "ingress-canary-cqtn6" (UID: "11c18a96-1e89-47a1-bae8-7184c53da82f") : failed to sync secret cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.048218 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bb0f6f96-6c75-46c0-9a5e-78310761816e-certs podName:bb0f6f96-6c75-46c0-9a5e-78310761816e nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.548204273 +0000 UTC m=+141.525316944 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/bb0f6f96-6c75-46c0-9a5e-78310761816e-certs") pod "machine-config-server-q2dkc" (UID: "bb0f6f96-6c75-46c0-9a5e-78310761816e") : failed to sync secret cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.050166 4911 configmap.go:193] Couldn't get configMap openshift-etcd-operator/etcd-service-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.050236 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-service-ca podName:3636d93a-cce9-4086-9cdc-b907988e1ff8 nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.550219954 +0000 UTC m=+141.527332625 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-service-ca" (UniqueName: "kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-service-ca") pod "etcd-operator-b45778765-j9nwx" (UID: "3636d93a-cce9-4086-9cdc-b907988e1ff8") : failed to sync configmap cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.051362 4911 secret.go:188] Couldn't get secret openshift-etcd-operator/etcd-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.051401 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3636d93a-cce9-4086-9cdc-b907988e1ff8-serving-cert podName:3636d93a-cce9-4086-9cdc-b907988e1ff8 nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.551390169 +0000 UTC m=+141.528502840 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/3636d93a-cce9-4086-9cdc-b907988e1ff8-serving-cert") pod "etcd-operator-b45778765-j9nwx" (UID: "3636d93a-cce9-4086-9cdc-b907988e1ff8") : failed to sync secret cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.051419 4911 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.051442 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bb0f6f96-6c75-46c0-9a5e-78310761816e-node-bootstrap-token podName:bb0f6f96-6c75-46c0-9a5e-78310761816e nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.55143635 +0000 UTC m=+141.528549021 (durationBeforeRetry 500ms). 
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.051461 4911 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/package-server-manager-serving-cert: failed to sync secret cache: timed out waiting for the condition
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.051488 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9323b64c-27c9-45cc-92bf-520f640e5126-package-server-manager-serving-cert podName:9323b64c-27c9-45cc-92bf-520f640e5126 nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.551482602 +0000 UTC m=+141.528595273 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "package-server-manager-serving-cert" (UniqueName: "kubernetes.io/secret/9323b64c-27c9-45cc-92bf-520f640e5126-package-server-manager-serving-cert") pod "package-server-manager-789f6589d5-2gvvc" (UID: "9323b64c-27c9-45cc-92bf-520f640e5126") : failed to sync secret cache: timed out waiting for the condition
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.053737 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3db165e-e40f-4731-9bc8-518b8bf79f14-serving-cert\") pod \"service-ca-operator-777779d784-wslbb\" (UID: \"d3db165e-e40f-4731-9bc8-518b8bf79f14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.059332 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.078713 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.083756 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.083940 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.583914243 +0000 UTC m=+141.561026924 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.084446 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.084905 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.584893962 +0000 UTC m=+141.562006653 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.100668 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.119285 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.138028 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.159177 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.179129 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.185452 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.185638 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.685603358 +0000 UTC m=+141.662716069 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.186595 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.187174 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.687158524 +0000 UTC m=+141.664271235 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.199193 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.219190 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.238412 4911 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.258740 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.279919 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.288370 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.288545 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.78851073 +0000 UTC m=+141.765623411 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.288913 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.289611 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.789580182 +0000 UTC m=+141.766692883 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.316349 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c75rb\" (UniqueName: \"kubernetes.io/projected/3f8faa64-f7f4-4ff5-a016-bb927dd0ec78-kube-api-access-c75rb\") pod \"apiserver-76f77b778f-rtrxr\" (UID: \"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78\") " pod="openshift-apiserver/apiserver-76f77b778f-rtrxr"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.348971 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmpqp\" (UniqueName: \"kubernetes.io/projected/d55a1ae9-2e28-49f6-904b-67a246fda7e6-kube-api-access-lmpqp\") pod \"machine-api-operator-5694c8668f-2flr8\" (UID: \"d55a1ae9-2e28-49f6-904b-67a246fda7e6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.363073 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlfjd\" (UniqueName: \"kubernetes.io/projected/e66f4856-1a57-441f-9701-4f61008259c6-kube-api-access-vlfjd\") pod \"apiserver-7bbb656c7d-6sql5\" (UID: \"e66f4856-1a57-441f-9701-4f61008259c6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.378319 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.380424 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n66j4\" (UniqueName: \"kubernetes.io/projected/96f91b1a-e276-4bc1-9308-5375745c803c-kube-api-access-n66j4\") pod \"console-f9d7485db-szrp2\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " pod="openshift-console/console-f9d7485db-szrp2"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.390810 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.391025 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.890987858 +0000 UTC m=+141.868100529 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.391444 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.392435 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.892409902 +0000 UTC m=+141.869522603 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.398768 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.412372 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-szrp2"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.418615 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.439262 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.458970 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.478983 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.493783 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.494142 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.994097477 +0000 UTC m=+141.971210208 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.494734 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.495362 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:43.995340714 +0000 UTC m=+141.972453565 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.498635 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.513523 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.518737 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.532408 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rtrxr"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.539713 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.561713 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.596403 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.596620 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11c18a96-1e89-47a1-bae8-7184c53da82f-cert\") pod \"ingress-canary-cqtn6\" (UID: \"11c18a96-1e89-47a1-bae8-7184c53da82f\") " pod="openshift-ingress-canary/ingress-canary-cqtn6"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.596861 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-service-ca\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.596971 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9323b64c-27c9-45cc-92bf-520f640e5126-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2gvvc\" (UID: \"9323b64c-27c9-45cc-92bf-520f640e5126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.596998 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bb0f6f96-6c75-46c0-9a5e-78310761816e-node-bootstrap-token\") pod \"machine-config-server-q2dkc\" (UID: \"bb0f6f96-6c75-46c0-9a5e-78310761816e\") " pod="openshift-machine-config-operator/machine-config-server-q2dkc"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.597062 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3636d93a-cce9-4086-9cdc-b907988e1ff8-serving-cert\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.597141 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-client\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.597179 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-ca\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.597207 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-config\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.597304 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d7dea83-ad4f-4f99-a281-1dcec6929b25-config-volume\") pod \"dns-default-2pbvr\" (UID: \"5d7dea83-ad4f-4f99-a281-1dcec6929b25\") " pod="openshift-dns/dns-default-2pbvr"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.597328 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5d7dea83-ad4f-4f99-a281-1dcec6929b25-metrics-tls\") pod \"dns-default-2pbvr\" (UID: \"5d7dea83-ad4f-4f99-a281-1dcec6929b25\") " pod="openshift-dns/dns-default-2pbvr"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.597351 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bb0f6f96-6c75-46c0-9a5e-78310761816e-certs\") pod \"machine-config-server-q2dkc\" (UID: \"bb0f6f96-6c75-46c0-9a5e-78310761816e\") " pod="openshift-machine-config-operator/machine-config-server-q2dkc"
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.598864 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:44.098834744 +0000 UTC m=+142.075947415 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.599189 4911 request.go:700] Waited for 1.869238244s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-scheduler-operator/serviceaccounts/openshift-kube-scheduler-operator/token
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.599889 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d7dea83-ad4f-4f99-a281-1dcec6929b25-config-volume\") pod \"dns-default-2pbvr\" (UID: \"5d7dea83-ad4f-4f99-a281-1dcec6929b25\") " pod="openshift-dns/dns-default-2pbvr"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.600175 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-ca\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.600717 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-service-ca\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.601654 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3636d93a-cce9-4086-9cdc-b907988e1ff8-config\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.604201 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/11c18a96-1e89-47a1-bae8-7184c53da82f-cert\") pod \"ingress-canary-cqtn6\" (UID: \"11c18a96-1e89-47a1-bae8-7184c53da82f\") " pod="openshift-ingress-canary/ingress-canary-cqtn6"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.607156 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bb0f6f96-6c75-46c0-9a5e-78310761816e-certs\") pod \"machine-config-server-q2dkc\" (UID: \"bb0f6f96-6c75-46c0-9a5e-78310761816e\") " pod="openshift-machine-config-operator/machine-config-server-q2dkc"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.607583 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3636d93a-cce9-4086-9cdc-b907988e1ff8-etcd-client\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.608394 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9323b64c-27c9-45cc-92bf-520f640e5126-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2gvvc\" (UID: \"9323b64c-27c9-45cc-92bf-520f640e5126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.609400 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3636d93a-cce9-4086-9cdc-b907988e1ff8-serving-cert\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.610013 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5d7dea83-ad4f-4f99-a281-1dcec6929b25-metrics-tls\") pod \"dns-default-2pbvr\" (UID: \"5d7dea83-ad4f-4f99-a281-1dcec6929b25\") " pod="openshift-dns/dns-default-2pbvr"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.611411 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9t2f8\" (UniqueName: \"kubernetes.io/projected/cd6c061e-79d1-4353-b077-69cb656a8823-kube-api-access-9t2f8\") pod \"openshift-config-operator-7777fb866f-c5pgg\" (UID: \"cd6c061e-79d1-4353-b077-69cb656a8823\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.611489 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bb0f6f96-6c75-46c0-9a5e-78310761816e-node-bootstrap-token\") pod \"machine-config-server-q2dkc\" (UID: \"bb0f6f96-6c75-46c0-9a5e-78310761816e\") " pod="openshift-machine-config-operator/machine-config-server-q2dkc"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.617195 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9a381472-08f8-4263-b008-573df71b1605-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2fdjj\" (UID: \"9a381472-08f8-4263-b008-573df71b1605\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.641714 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djxk9\" (UniqueName: \"kubernetes.io/projected/affba9da-62d0-47e6-b833-8b6c0e774fde-kube-api-access-djxk9\") pod \"controller-manager-879f6c89f-wb4m9\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.656527 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.660623 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-szrp2"]
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.676552 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbjl5\" (UniqueName: \"kubernetes.io/projected/9a3253d3-c916-477b-82cd-7f7911bfc1b0-kube-api-access-sbjl5\") pod \"oauth-openshift-558db77b4-h9qcg\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.679528 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwc7t\" (UniqueName: \"kubernetes.io/projected/e1d74425-0991-4922-9b99-95bb5e1c596c-kube-api-access-rwc7t\") pod \"console-operator-58897d9998-f7vrh\" (UID: \"e1d74425-0991-4922-9b99-95bb5e1c596c\") " pod="openshift-console-operator/console-operator-58897d9998-f7vrh"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.732614 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cce1e199-e6e7-41e1-b1b6-83f8d435c80e-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-k7lgs\" (UID: \"cce1e199-e6e7-41e1-b1b6-83f8d435c80e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.734650 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.737979 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.738473 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:44.238459116 +0000 UTC m=+142.215571777 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.758343 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlb4z\" (UniqueName: \"kubernetes.io/projected/a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8-kube-api-access-nlb4z\") pod \"cluster-samples-operator-665b6dd947-ms4bf\" (UID: \"a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.765327 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bqg2\" (UniqueName: \"kubernetes.io/projected/cce1e199-e6e7-41e1-b1b6-83f8d435c80e-kube-api-access-5bqg2\") pod \"cluster-image-registry-operator-dc59b4c8b-k7lgs\" (UID: \"cce1e199-e6e7-41e1-b1b6-83f8d435c80e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.769184 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-bound-sa-token\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.773304 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.775652 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wlfq\" (UniqueName: \"kubernetes.io/projected/79387310-5596-4b5f-af33-0f6b8a9a40ff-kube-api-access-4wlfq\") pod \"dns-operator-744455d44c-dl4br\" (UID: \"79387310-5596-4b5f-af33-0f6b8a9a40ff\") " pod="openshift-dns-operator/dns-operator-744455d44c-dl4br"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.779564 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.795580 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46w52\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-kube-api-access-46w52\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.825658 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2flr8"]
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.835231 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/639123e9-fc56-4554-a59a-0d2c3866c340-bound-sa-token\") pod \"ingress-operator-5b745b69d9-5f6q5\" (UID: \"639123e9-fc56-4554-a59a-0d2c3866c340\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.839412 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.840087 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:44.340066918 +0000 UTC m=+142.317179589 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.868228 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5lbq\" (UniqueName: \"kubernetes.io/projected/7362e2ef-cab6-4891-bb59-a7969b6e72b6-kube-api-access-b5lbq\") pod \"downloads-7954f5f757-mrbmd\" (UID: \"7362e2ef-cab6-4891-bb59-a7969b6e72b6\") " pod="openshift-console/downloads-7954f5f757-mrbmd"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.875733 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.882646 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nn8m\" (UniqueName: \"kubernetes.io/projected/9ff35d74-5cd5-4d25-bb50-8302240285b1-kube-api-access-8nn8m\") pod \"packageserver-d55dfcdfc-pjbd8\" (UID: \"9ff35d74-5cd5-4d25-bb50-8302240285b1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.894319 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2zzr\" (UniqueName: \"kubernetes.io/projected/3636d93a-cce9-4086-9cdc-b907988e1ff8-kube-api-access-g2zzr\") pod \"etcd-operator-b45778765-j9nwx\" (UID: \"3636d93a-cce9-4086-9cdc-b907988e1ff8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.899152 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rtrxr"]
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.919232 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4xks\" (UniqueName: \"kubernetes.io/projected/03fb6356-2be5-4caa-b804-5b44d2b52c61-kube-api-access-c4xks\") pod \"service-ca-9c57cc56f-dxnj8\" (UID: \"03fb6356-2be5-4caa-b804-5b44d2b52c61\") " pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.940635 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chcgd\" (UniqueName: \"kubernetes.io/projected/a3def338-7395-4682-81be-d3e671abf2ed-kube-api-access-chcgd\") pod \"machine-config-controller-84d6567774-6rs7d\" (UID: \"a3def338-7395-4682-81be-d3e671abf2ed\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.941513 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:43 crc kubenswrapper[4911]: E0929 21:27:43.941971 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:44.441955519 +0000 UTC m=+142.419068190 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.952711 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5"]
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.954347 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.962069 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f47px\" (UniqueName: \"kubernetes.io/projected/3b6e2fa8-d098-4250-b417-c577a01c5975-kube-api-access-f47px\") pod \"catalog-operator-68c6474976-dn2nd\" (UID: \"3b6e2fa8-d098-4250-b417-c577a01c5975\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.968217 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-f7vrh"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.971500 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.980927 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.981212 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5q5w\" (UniqueName: \"kubernetes.io/projected/11c18a96-1e89-47a1-bae8-7184c53da82f-kube-api-access-k5q5w\") pod \"ingress-canary-cqtn6\" (UID: \"11c18a96-1e89-47a1-bae8-7184c53da82f\") " pod="openshift-ingress-canary/ingress-canary-cqtn6"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.998709 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdhfg\" (UniqueName: \"kubernetes.io/projected/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-kube-api-access-kdhfg\") pod \"collect-profiles-29319675-qdtss\" (UID: \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss"
Sep 29 21:27:43 crc kubenswrapper[4911]: I0929 21:27:43.999142 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj"]
Sep 29 21:27:44 crc kubenswrapper[4911]: W0929 21:27:44.001576 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode66f4856_1a57_441f_9701_4f61008259c6.slice/crio-726fb83dae2b222bbfe38f3d926ecddfe7ff8d911fb820fa400dab34e36c21c7 WatchSource:0}: Error finding container 726fb83dae2b222bbfe38f3d926ecddfe7ff8d911fb820fa400dab34e36c21c7: Status 404 returned error can't find the container with id 726fb83dae2b222bbfe38f3d926ecddfe7ff8d911fb820fa400dab34e36c21c7
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.010840 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.014610 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4679p\" (UniqueName: \"kubernetes.io/projected/bead4097-1138-4381-9884-93bbf059b717-kube-api-access-4679p\") pod \"router-default-5444994796-gx2fb\" (UID: \"bead4097-1138-4381-9884-93bbf059b717\") " pod="openshift-ingress/router-default-5444994796-gx2fb"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.023077 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.033699 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.036024 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-dl4br"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.039855 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkvzp\" (UniqueName: \"kubernetes.io/projected/21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af-kube-api-access-qkvzp\") pod \"multus-admission-controller-857f4d67dd-nwjdw\" (UID: \"21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nwjdw"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.043494 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.044050 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:44.544034277 +0000 UTC m=+142.521146938 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.052360 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwjfh\" (UniqueName: \"kubernetes.io/projected/a7232476-6ca7-47a3-8a5d-2d0e26afebdc-kube-api-access-nwjfh\") pod \"openshift-controller-manager-operator-756b6f6bc6-5bjvj\" (UID: \"a7232476-6ca7-47a3-8a5d-2d0e26afebdc\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.068369 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h9qcg"]
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.081115 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nj55\" (UniqueName: \"kubernetes.io/projected/d4fc0a96-1ce0-4e5d-a743-3256db5295bb-kube-api-access-2nj55\") pod \"migrator-59844c95c7-rlm59\" (UID: \"d4fc0a96-1ce0-4e5d-a743-3256db5295bb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rlm59"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.094554 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-mrbmd"
Need to start a new one" pod="openshift-console/downloads-7954f5f757-mrbmd" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.096621 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-cqtn6" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.098350 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3eae054-c8ac-4d16-a058-8a724a8a6d0f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-mmgf6\" (UID: \"d3eae054-c8ac-4d16-a058-8a724a8a6d0f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.120874 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg"] Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.122386 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmc2r\" (UniqueName: \"kubernetes.io/projected/3b34b90b-583c-444d-921c-0d5fa13835d6-kube-api-access-dmc2r\") pod \"authentication-operator-69f744f599-rd6cq\" (UID: \"3b34b90b-583c-444d-921c-0d5fa13835d6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.145833 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.146285 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:44.646269418 +0000 UTC m=+142.623382089 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.158620 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md58v\" (UniqueName: \"kubernetes.io/projected/d3db165e-e40f-4731-9bc8-518b8bf79f14-kube-api-access-md58v\") pod \"service-ca-operator-777779d784-wslbb\" (UID: \"d3db165e-e40f-4731-9bc8-518b8bf79f14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.185330 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.190342 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqwwf\" (UniqueName: \"kubernetes.io/projected/9da83d12-76d2-4194-a344-a1453b536a27-kube-api-access-vqwwf\") pod \"kube-storage-version-migrator-operator-b67b599dd-zqthg\" (UID: \"9da83d12-76d2-4194-a344-a1453b536a27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.195551 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x5rk\" (UniqueName: \"kubernetes.io/projected/9323b64c-27c9-45cc-92bf-520f640e5126-kube-api-access-9x5rk\") pod \"package-server-manager-789f6589d5-2gvvc\" (UID: \"9323b64c-27c9-45cc-92bf-520f640e5126\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.206608 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfd6l\" (UniqueName: \"kubernetes.io/projected/bb0f6f96-6c75-46c0-9a5e-78310761816e-kube-api-access-nfd6l\") pod \"machine-config-server-q2dkc\" (UID: \"bb0f6f96-6c75-46c0-9a5e-78310761816e\") " pod="openshift-machine-config-operator/machine-config-server-q2dkc" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.221154 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-nwjdw" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.224413 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rlm59" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.233875 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.249474 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96xkl\" (UniqueName: \"kubernetes.io/projected/b6d3166a-4bc1-416c-b6f4-68207a1b0ccb-kube-api-access-96xkl\") pod \"machine-approver-56656f9798-kxz4b\" (UID: \"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.249816 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.250133 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.250345 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:44.750318984 +0000 UTC m=+142.727431655 (durationBeforeRetry 500ms). 
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.250420 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.250950 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:44.750940153 +0000 UTC m=+142.728052824 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.257818 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mqw9\" (UniqueName: \"kubernetes.io/projected/72e24945-9968-4fe1-acd3-84d4c0e6099f-kube-api-access-5mqw9\") pod \"olm-operator-6b444d44fb-8gplk\" (UID: \"72e24945-9968-4fe1-acd3-84d4c0e6099f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.265596 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w9x4\" (UniqueName: \"kubernetes.io/projected/36aba054-4229-40fb-8fd4-344cd9f61a40-kube-api-access-6w9x4\") pod \"control-plane-machine-set-operator-78cbb6b69f-bgnq2\" (UID: \"36aba054-4229-40fb-8fd4-344cd9f61a40\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.285662 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/49f9ff74-e395-4c2a-b467-c248ab4ae4bb-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-s62fl\" (UID: \"49f9ff74-e395-4c2a-b467-c248ab4ae4bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.287154 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.295559 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.298463 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5xkh\" (UniqueName: \"kubernetes.io/projected/639123e9-fc56-4554-a59a-0d2c3866c340-kube-api-access-w5xkh\") pod \"ingress-operator-5b745b69d9-5f6q5\" (UID: \"639123e9-fc56-4554-a59a-0d2c3866c340\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.301239 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.301937 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-gx2fb"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.316897 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.323102 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg5w7\" (UniqueName: \"kubernetes.io/projected/615ffb8b-fb38-488c-b326-df6086017073-kube-api-access-xg5w7\") pod \"marketplace-operator-79b997595-s8dhg\" (UID: \"615ffb8b-fb38-488c-b326-df6086017073\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.338291 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.344246 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf22b\" (UniqueName: \"kubernetes.io/projected/5d7dea83-ad4f-4f99-a281-1dcec6929b25-kube-api-access-tf22b\") pod \"dns-default-2pbvr\" (UID: \"5d7dea83-ad4f-4f99-a281-1dcec6929b25\") " pod="openshift-dns/dns-default-2pbvr"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.351608 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.351910 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:44.851885426 +0000 UTC m=+142.828998097 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.352016 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.353259 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:44.853251007 +0000 UTC m=+142.830363678 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.358186 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr22l\" (UniqueName: \"kubernetes.io/projected/cd47cdce-41f4-416c-9436-4c386c50eb9e-kube-api-access-vr22l\") pod \"route-controller-manager-6576b87f9c-2mk6j\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.376597 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wb4m9"]
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.384143 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-f7vrh"]
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.386987 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.387555 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rppr2\" (UniqueName: \"kubernetes.io/projected/db0c906b-7d60-4048-ad1d-e9765282348f-kube-api-access-rppr2\") pod \"openshift-apiserver-operator-796bbdcf4f-cp2jw\" (UID: \"db0c906b-7d60-4048-ad1d-e9765282348f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.403875 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkw58\" (UniqueName: \"kubernetes.io/projected/ae2112f8-47a1-4faf-8ee6-83f96c5a3def-kube-api-access-bkw58\") pod \"csi-hostpathplugin-grwjm\" (UID: \"ae2112f8-47a1-4faf-8ee6-83f96c5a3def\") " pod="hostpath-provisioner/csi-hostpathplugin-grwjm"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.408495 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.418823 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-2pbvr"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.421814 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.424500 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-q2dkc"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.425195 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dxnj8"]
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.427468 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpvlm\" (UniqueName: \"kubernetes.io/projected/8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9-kube-api-access-bpvlm\") pod \"machine-config-operator-74547568cd-nh5bz\" (UID: \"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.454694 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8"]
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.462006 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.462447 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:44.962424228 +0000 UTC m=+142.939536909 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.462654 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.463418 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:44.963378087 +0000 UTC m=+142.940490758 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.491630 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.498734 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.507545 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j"
Sep 29 21:27:44 crc kubenswrapper[4911]: W0929 21:27:44.530162 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1d74425_0991_4922_9b99_95bb5e1c596c.slice/crio-19806f2d6634fda82a49359f64a023cbe37e9987bb692be0a039286c0879de82 WatchSource:0}: Error finding container 19806f2d6634fda82a49359f64a023cbe37e9987bb692be0a039286c0879de82: Status 404 returned error can't find the container with id 19806f2d6634fda82a49359f64a023cbe37e9987bb692be0a039286c0879de82
Sep 29 21:27:44 crc kubenswrapper[4911]: W0929 21:27:44.534257 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaffba9da_62d0_47e6_b833_8b6c0e774fde.slice/crio-8a3ec9a3bbea7a01ea52942466afb3a981c6471aec455a56e4bedd91f3378007 WatchSource:0}: Error finding container 8a3ec9a3bbea7a01ea52942466afb3a981c6471aec455a56e4bedd91f3378007: Status 404 returned error can't find the container with id 8a3ec9a3bbea7a01ea52942466afb3a981c6471aec455a56e4bedd91f3378007
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.538407 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw"
Sep 29 21:27:44 crc kubenswrapper[4911]: W0929 21:27:44.554248 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ff35d74_5cd5_4d25_bb50_8302240285b1.slice/crio-2ce5142aa7337e5f00e6162ee49f00976bab5a8c1ff7d895fb4a3731eb67e403 WatchSource:0}: Error finding container 2ce5142aa7337e5f00e6162ee49f00976bab5a8c1ff7d895fb4a3731eb67e403: Status 404 returned error can't find the container with id 2ce5142aa7337e5f00e6162ee49f00976bab5a8c1ff7d895fb4a3731eb67e403
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.563137 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.564223 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.565069 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:45.06500982 +0000 UTC m=+143.042122501 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.577671 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.608610 4911 generic.go:334] "Generic (PLEG): container finished" podID="3f8faa64-f7f4-4ff5-a016-bb927dd0ec78" containerID="68680f050cef37c20bb80ae3f8cf02a021e9a54418bf75f4b02b14a7362431a2" exitCode=0
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.608734 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" event={"ID":"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78","Type":"ContainerDied","Data":"68680f050cef37c20bb80ae3f8cf02a021e9a54418bf75f4b02b14a7362431a2"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.608776 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" event={"ID":"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78","Type":"ContainerStarted","Data":"0a2684588760aa0ea0b280f9a906ba92ba79cfe5b4a50ac0e9e3f9b43aa4c70b"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.617109 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj" event={"ID":"9a381472-08f8-4263-b008-573df71b1605","Type":"ContainerStarted","Data":"54b9ec4eb8bf03e86a94c0bfb33f6903c5288a58d6e43788c1c7f50f1f0a5d82"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.631991 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-szrp2" event={"ID":"96f91b1a-e276-4bc1-9308-5375745c803c","Type":"ContainerStarted","Data":"e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.632058 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-szrp2" event={"ID":"96f91b1a-e276-4bc1-9308-5375745c803c","Type":"ContainerStarted","Data":"c74ef1122b06529e8b44a70c1679d812075d29bb27f8a7112384540ab2b0ebd4"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.665924 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.666307 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:45.166289713 +0000 UTC m=+143.143402384 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.670405 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" event={"ID":"9ff35d74-5cd5-4d25-bb50-8302240285b1","Type":"ContainerStarted","Data":"2ce5142aa7337e5f00e6162ee49f00976bab5a8c1ff7d895fb4a3731eb67e403"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.689001 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" event={"ID":"e66f4856-1a57-441f-9701-4f61008259c6","Type":"ContainerStarted","Data":"726fb83dae2b222bbfe38f3d926ecddfe7ff8d911fb820fa400dab34e36c21c7"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.694871 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-grwjm"
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.742464 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" event={"ID":"9a3253d3-c916-477b-82cd-7f7911bfc1b0","Type":"ContainerStarted","Data":"828df8fd7f7e521d4f1032ac6bcaf4d5d5939bd5d84671ca8dcd6d923cc4fe67"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.748365 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" event={"ID":"d55a1ae9-2e28-49f6-904b-67a246fda7e6","Type":"ContainerStarted","Data":"31879fa79f78189eb8b650d3ed015e900a5773a633f9ce4b0a29676fcd079501"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.748437 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" event={"ID":"d55a1ae9-2e28-49f6-904b-67a246fda7e6","Type":"ContainerStarted","Data":"09ad8c14abaf9187178fee4aa73cf1645cb926b204df605f7ae668b85e0f23df"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.748451 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" event={"ID":"d55a1ae9-2e28-49f6-904b-67a246fda7e6","Type":"ContainerStarted","Data":"2189f3c7b5bff8f45ca0173e03f9b554bea9b213b9a6610a07b17a7efa10479a"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.755690 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8" event={"ID":"03fb6356-2be5-4caa-b804-5b44d2b52c61","Type":"ContainerStarted","Data":"df219ed2442729c31b99f9a2ddb2d40eea102b33dbc9f4e84f895dfa429187ee"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.759072 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-f7vrh" event={"ID":"e1d74425-0991-4922-9b99-95bb5e1c596c","Type":"ContainerStarted","Data":"19806f2d6634fda82a49359f64a023cbe37e9987bb692be0a039286c0879de82"}
Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.762149 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" event={"ID":"affba9da-62d0-47e6-b833-8b6c0e774fde","Type":"ContainerStarted","Data":"8a3ec9a3bbea7a01ea52942466afb3a981c6471aec455a56e4bedd91f3378007"}
event={"ID":"affba9da-62d0-47e6-b833-8b6c0e774fde","Type":"ContainerStarted","Data":"8a3ec9a3bbea7a01ea52942466afb3a981c6471aec455a56e4bedd91f3378007"} Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.765957 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" event={"ID":"cd6c061e-79d1-4353-b077-69cb656a8823","Type":"ContainerStarted","Data":"e716fd758c620050340d193bbc3118167d559e70cb592c09f0e7823832358c24"} Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.771684 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.772699 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:45.272658129 +0000 UTC m=+143.249770800 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.776712 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf"] Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.776876 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.777225 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:45.277210737 +0000 UTC m=+143.254323408 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.794253 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-j9nwx"] Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.813120 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-dl4br"] Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.815891 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d"] Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.879430 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.880042 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:45.380027356 +0000 UTC m=+143.357140027 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:44 crc kubenswrapper[4911]: W0929 21:27:44.922247 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6d3166a_4bc1_416c_b6f4_68207a1b0ccb.slice/crio-7c0dfcb9d33e075307bfa99a944c584bf1c99581dabbb8c8658341545140f262 WatchSource:0}: Error finding container 7c0dfcb9d33e075307bfa99a944c584bf1c99581dabbb8c8658341545140f262: Status 404 returned error can't find the container with id 7c0dfcb9d33e075307bfa99a944c584bf1c99581dabbb8c8658341545140f262 Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.972597 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd"] Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.981333 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:44 crc kubenswrapper[4911]: E0929 21:27:44.981726 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:45.481712921 +0000 UTC m=+143.458825592 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.988927 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-2flr8" podStartSLOduration=121.988897179 podStartE2EDuration="2m1.988897179s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:44.973393959 +0000 UTC m=+142.950506650" watchObservedRunningTime="2025-09-29 21:27:44.988897179 +0000 UTC m=+142.966009890" Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.990843 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj"] Sep 29 21:27:44 crc kubenswrapper[4911]: I0929 21:27:44.994678 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss"] Sep 29 21:27:45 crc kubenswrapper[4911]: W0929 21:27:45.034530 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb0f6f96_6c75_46c0_9a5e_78310761816e.slice/crio-b03aca1ff2c87b1b859c1097691dc9e8db802f890f771bfbfafe2c3410d2b6c1 WatchSource:0}: Error finding container b03aca1ff2c87b1b859c1097691dc9e8db802f890f771bfbfafe2c3410d2b6c1: Status 404 returned error can't find the container with id b03aca1ff2c87b1b859c1097691dc9e8db802f890f771bfbfafe2c3410d2b6c1 Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.082408 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:45 crc kubenswrapper[4911]: E0929 21:27:45.082670 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:45.582626982 +0000 UTC m=+143.559739653 (durationBeforeRetry 500ms). 
Sep 29 21:27:45 crc kubenswrapper[4911]: W0929 21:27:45.108137 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ad88e09_2e13_4070_9c1f_75cb9dd12ebf.slice/crio-f08a8994a1230c8c4a574e0226b25cc826ec56db3dc5e5bc0ab10cdbca2c84b1 WatchSource:0}: Error finding container f08a8994a1230c8c4a574e0226b25cc826ec56db3dc5e5bc0ab10cdbca2c84b1: Status 404 returned error can't find the container with id f08a8994a1230c8c4a574e0226b25cc826ec56db3dc5e5bc0ab10cdbca2c84b1
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.184331 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:45 crc kubenswrapper[4911]: E0929 21:27:45.184843 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:45.684819933 +0000 UTC m=+143.661932604 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.286563 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:45 crc kubenswrapper[4911]: E0929 21:27:45.287701 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:45.787682984 +0000 UTC m=+143.764795655 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.389753 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:45 crc kubenswrapper[4911]: E0929 21:27:45.393334 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:45.893315928 +0000 UTC m=+143.870428599 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.398953 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs"]
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.408116 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-rlm59"]
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.497876 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:45 crc kubenswrapper[4911]: E0929 21:27:45.498635 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:45.998590531 +0000 UTC m=+143.975703212 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.500850 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:45 crc kubenswrapper[4911]: E0929 21:27:45.501934 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.001920512 +0000 UTC m=+143.979033183 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.557472 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl"]
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.564114 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-cqtn6"]
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.597174 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc"]
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.602764 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:45 crc kubenswrapper[4911]: E0929 21:27:45.603431 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.10339628 +0000 UTC m=+144.080508951 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.605101 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:45 crc kubenswrapper[4911]: E0929 21:27:45.605600 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.105577306 +0000 UTC m=+144.082689967 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:45 crc kubenswrapper[4911]: W0929 21:27:45.637993 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11c18a96_1e89_47a1_bae8_7184c53da82f.slice/crio-621cad6a96aeff1698624be47bcf7aabb36ba36a1fd5fd9cc1c0b9c1c2f6cbb5 WatchSource:0}: Error finding container 621cad6a96aeff1698624be47bcf7aabb36ba36a1fd5fd9cc1c0b9c1c2f6cbb5: Status 404 returned error can't find the container with id 621cad6a96aeff1698624be47bcf7aabb36ba36a1fd5fd9cc1c0b9c1c2f6cbb5
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.709908 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:45 crc kubenswrapper[4911]: E0929 21:27:45.719021 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.218436899 +0000 UTC m=+144.195549580 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.761568 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-mrbmd"]
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.780387 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s8dhg"]
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.809941 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8" event={"ID":"03fb6356-2be5-4caa-b804-5b44d2b52c61","Type":"ContainerStarted","Data":"79b9a69a0ea58404b0219207b0d9bc7195a3ef83067487571ceb7ff83f8983f3"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.810007 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wslbb"]
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.813438 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rlm59" event={"ID":"d4fc0a96-1ce0-4e5d-a743-3256db5295bb","Type":"ContainerStarted","Data":"e66d500e4adf9f961c6bd1d22c48bb0b901496c491867538fd2964d193553cfa"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.819179 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:45 crc kubenswrapper[4911]: E0929 21:27:45.819547 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.319532686 +0000 UTC m=+144.296645357 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.821874 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-nwjdw"]
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.822919 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-dl4br" event={"ID":"79387310-5596-4b5f-af33-0f6b8a9a40ff","Type":"ContainerStarted","Data":"cc09b3206836afed82fcdc4282dfe9324367819ffc2e746182248e3f66271af5"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.830462 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj" event={"ID":"9a381472-08f8-4263-b008-573df71b1605","Type":"ContainerStarted","Data":"86d66f4942e7d603188a9cc17b901b85ba3f536d047c8579d60b8b0253d30e22"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.830883 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk"]
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.832127 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl" event={"ID":"49f9ff74-e395-4c2a-b467-c248ab4ae4bb","Type":"ContainerStarted","Data":"87252ccee5da70082f00704abe4d7846b841195ba54bf89153a0b19110040fd7"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.834003 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj" event={"ID":"a7232476-6ca7-47a3-8a5d-2d0e26afebdc","Type":"ContainerStarted","Data":"b87c327ea0c148e0e34ccc155dc21d316e5c0d5a3c081218c397030772b57be6"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.838222 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" event={"ID":"9ff35d74-5cd5-4d25-bb50-8302240285b1","Type":"ContainerStarted","Data":"1aa51dc86173aa3dd2074843555c0922f7db407af7dbe2fefecb3671d9779566"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.839216 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8"
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.841945 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-f7vrh" event={"ID":"e1d74425-0991-4922-9b99-95bb5e1c596c","Type":"ContainerStarted","Data":"207ba8c6b73830a1a3c9eebe23c077155187bd7d52ccb778b3643f6bbc2bd449"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.843081 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-f7vrh"
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.845735 4911 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-pjbd8 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:5443/healthz\": dial tcp 10.217.0.24:5443: connect: connection refused" start-of-body=
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.845743 4911 patch_prober.go:28] interesting pod/console-operator-58897d9998-f7vrh container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body=
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.845803 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-f7vrh" podUID="e1d74425-0991-4922-9b99-95bb5e1c596c" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused"
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.845776 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" podUID="9ff35d74-5cd5-4d25-bb50-8302240285b1" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.24:5443/healthz\": dial tcp 10.217.0.24:5443: connect: connection refused"
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.847866 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" event={"ID":"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb","Type":"ContainerStarted","Data":"7c0dfcb9d33e075307bfa99a944c584bf1c99581dabbb8c8658341545140f262"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.853856 4911 generic.go:334] "Generic (PLEG): container finished" podID="cd6c061e-79d1-4353-b077-69cb656a8823" containerID="3fee9f92c3e1867799209f20846bb9bca48a0f9747dec6781d341d16de68e0f2" exitCode=0
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.853955 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" event={"ID":"cd6c061e-79d1-4353-b077-69cb656a8823","Type":"ContainerDied","Data":"3fee9f92c3e1867799209f20846bb9bca48a0f9747dec6781d341d16de68e0f2"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.874254 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6"]
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.876937 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d" event={"ID":"a3def338-7395-4682-81be-d3e671abf2ed","Type":"ContainerStarted","Data":"2997a0d5b4c6da095d8b6c17957a23d08164493a2f7b6faa15eec761ea463728"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.883087 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-cqtn6" event={"ID":"11c18a96-1e89-47a1-bae8-7184c53da82f","Type":"ContainerStarted","Data":"621cad6a96aeff1698624be47bcf7aabb36ba36a1fd5fd9cc1c0b9c1c2f6cbb5"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.889454 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" event={"ID":"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf","Type":"ContainerStarted","Data":"f08a8994a1230c8c4a574e0226b25cc826ec56db3dc5e5bc0ab10cdbca2c84b1"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.891870 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs" event={"ID":"cce1e199-e6e7-41e1-b1b6-83f8d435c80e","Type":"ContainerStarted","Data":"62c0d61377ba87e7aabf65d710ea89644aa2620eaa250aab54e6dd69f5931188"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.894631 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc" event={"ID":"9323b64c-27c9-45cc-92bf-520f640e5126","Type":"ContainerStarted","Data":"8d62b9b92db943a342657b26e8ce66a9c3434fbdd68314d4d871a594f7a848a9"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.897813 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" event={"ID":"9a3253d3-c916-477b-82cd-7f7911bfc1b0","Type":"ContainerStarted","Data":"4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1"}
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.898485 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:45 crc kubenswrapper[4911]: W0929 21:27:45.898495 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21e1d3e8_e4aa_43d0_a5c8_61d2dba4a7af.slice/crio-6654bdcbcf52b1492695d23efe15fb9e8f9dcdb449dea2cde40faa646d13f6a6 WatchSource:0}: Error finding container 6654bdcbcf52b1492695d23efe15fb9e8f9dcdb449dea2cde40faa646d13f6a6: Status 404 returned error can't find the container with id 6654bdcbcf52b1492695d23efe15fb9e8f9dcdb449dea2cde40faa646d13f6a6
Sep 29 21:27:45 crc kubenswrapper[4911]: W0929 21:27:45.899120 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72e24945_9968_4fe1_acd3_84d4c0e6099f.slice/crio-fefac9f470bc2ab4822702dc970728904ffe13b23afb78e7e00e0b74a715d9b2 WatchSource:0}: Error finding container fefac9f470bc2ab4822702dc970728904ffe13b23afb78e7e00e0b74a715d9b2: Status 404 returned error can't find the container with id fefac9f470bc2ab4822702dc970728904ffe13b23afb78e7e00e0b74a715d9b2
Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.920192 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:45 crc kubenswrapper[4911]: E0929 21:27:45.923011 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.422976185 +0000 UTC m=+144.400088856 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.934422 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:45 crc kubenswrapper[4911]: E0929 21:27:45.937033 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.437011229 +0000 UTC m=+144.414123900 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.946425 4911 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-h9qcg container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.13:6443/healthz\": dial tcp 10.217.0.13:6443: connect: connection refused" start-of-body= Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.946481 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" podUID="9a3253d3-c916-477b-82cd-7f7911bfc1b0" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.13:6443/healthz\": dial tcp 10.217.0.13:6443: connect: connection refused" Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.952685 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd" event={"ID":"3b6e2fa8-d098-4250-b417-c577a01c5975","Type":"ContainerStarted","Data":"e364387078e29d0b628a95e1e03771b07636a83b293b52162ab2262789f4f79e"} Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.955966 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd" Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.965017 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-gx2fb" event={"ID":"bead4097-1138-4381-9884-93bbf059b717","Type":"ContainerStarted","Data":"5ee31c234ba006cff4a89add0e38c5c54070a7f3803e2818f39f0fb1dbd7822d"} Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.967056 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-gx2fb" 
event={"ID":"bead4097-1138-4381-9884-93bbf059b717","Type":"ContainerStarted","Data":"fe0dd1fd9ead108d13d4ea8bbc5a2bf68da01442c9f87f020246e6f58c5f3209"} Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.974809 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rd6cq"] Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.981532 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" event={"ID":"affba9da-62d0-47e6-b833-8b6c0e774fde","Type":"ContainerStarted","Data":"9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243"} Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.982996 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.989305 4911 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-dn2nd container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Sep 29 21:27:45 crc kubenswrapper[4911]: I0929 21:27:45.989389 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd" podUID="3b6e2fa8-d098-4250-b417-c577a01c5975" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.019726 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx" event={"ID":"3636d93a-cce9-4086-9cdc-b907988e1ff8","Type":"ContainerStarted","Data":"940ea63abe8053406a8003aafee6e60ed95e75d3d9619c46c02fd2d95ede8b2d"} Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.022902 4911 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-wb4m9 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.022963 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" podUID="affba9da-62d0-47e6-b833-8b6c0e774fde" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.024859 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf" event={"ID":"a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8","Type":"ContainerStarted","Data":"d1b1657e0c5a4ac1dc1c3a670f382777cd8b10856c2d0104bb3405391a63ee30"} Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.027511 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j"] Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.037181 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:46 crc kubenswrapper[4911]: E0929 21:27:46.040286 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.540262301 +0000 UTC m=+144.517374972 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.042942 4911 generic.go:334] "Generic (PLEG): container finished" podID="e66f4856-1a57-441f-9701-4f61008259c6" containerID="624bb51701253f6c917e095e9666ddee87d5690136497e994892b6ab4799c530" exitCode=0 Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.043385 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2"] Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.043440 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" event={"ID":"e66f4856-1a57-441f-9701-4f61008259c6","Type":"ContainerDied","Data":"624bb51701253f6c917e095e9666ddee87d5690136497e994892b6ab4799c530"} Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.048947 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5"] Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.053670 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-grwjm"] Sep 29 21:27:46 crc kubenswrapper[4911]: W0929 21:27:46.059275 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b34b90b_583c_444d_921c_0d5fa13835d6.slice/crio-5633dc4d51c01805ab5607f461972ea8e9c4915d73c9a0c07802e9ca438f1bee WatchSource:0}: Error finding container 5633dc4d51c01805ab5607f461972ea8e9c4915d73c9a0c07802e9ca438f1bee: Status 404 returned error can't find the container with id 5633dc4d51c01805ab5607f461972ea8e9c4915d73c9a0c07802e9ca438f1bee Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.059971 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-2pbvr"] Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.061609 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-q2dkc" event={"ID":"bb0f6f96-6c75-46c0-9a5e-78310761816e","Type":"ContainerStarted","Data":"b03aca1ff2c87b1b859c1097691dc9e8db802f890f771bfbfafe2c3410d2b6c1"} Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.144830 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: 
\"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:46 crc kubenswrapper[4911]: E0929 21:27:46.145296 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.645277327 +0000 UTC m=+144.622389998 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:46 crc kubenswrapper[4911]: W0929 21:27:46.201961 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d7dea83_ad4f_4f99_a281_1dcec6929b25.slice/crio-9e96247afe8e1cf4b090869d4529c58cc1a9e2d33777336b48c823085b3f492f WatchSource:0}: Error finding container 9e96247afe8e1cf4b090869d4529c58cc1a9e2d33777336b48c823085b3f492f: Status 404 returned error can't find the container with id 9e96247afe8e1cf4b090869d4529c58cc1a9e2d33777336b48c823085b3f492f Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.210520 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz"] Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.212149 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-szrp2" podStartSLOduration=123.211376956 podStartE2EDuration="2m3.211376956s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:46.202389194 +0000 UTC m=+144.179501865" watchObservedRunningTime="2025-09-29 21:27:46.211376956 +0000 UTC m=+144.188489627" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.236374 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg"] Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.244883 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw"] Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.245690 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:46 crc kubenswrapper[4911]: E0929 21:27:46.246770 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.746749455 +0000 UTC m=+144.723862116 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.302733 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.306641 4911 patch_prober.go:28] interesting pod/router-default-5444994796-gx2fb container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.306722 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx2fb" podUID="bead4097-1138-4381-9884-93bbf059b717" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.314746 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" podStartSLOduration=123.31471886 podStartE2EDuration="2m3.31471886s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:46.313975398 +0000 UTC m=+144.291088069" watchObservedRunningTime="2025-09-29 21:27:46.31471886 +0000 UTC m=+144.291831531" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.348827 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:46 crc kubenswrapper[4911]: E0929 21:27:46.349220 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.849207223 +0000 UTC m=+144.826319894 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.366287 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2fdjj" podStartSLOduration=123.366261689 podStartE2EDuration="2m3.366261689s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:46.355338108 +0000 UTC m=+144.332450769" watchObservedRunningTime="2025-09-29 21:27:46.366261689 +0000 UTC m=+144.343374360" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.436162 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-f7vrh" podStartSLOduration=123.436135302 podStartE2EDuration="2m3.436135302s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:46.436001588 +0000 UTC m=+144.413114269" watchObservedRunningTime="2025-09-29 21:27:46.436135302 +0000 UTC m=+144.413247973" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.436750 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd" podStartSLOduration=123.436744391 podStartE2EDuration="2m3.436744391s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:46.400047151 +0000 UTC m=+144.377159822" watchObservedRunningTime="2025-09-29 21:27:46.436744391 +0000 UTC m=+144.413857062" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.450674 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:46 crc kubenswrapper[4911]: E0929 21:27:46.450851 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.950830376 +0000 UTC m=+144.927943037 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.451295 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:46 crc kubenswrapper[4911]: E0929 21:27:46.451657 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:46.951649931 +0000 UTC m=+144.928762602 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.487448 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" podStartSLOduration=124.487413253 podStartE2EDuration="2m4.487413253s" podCreationTimestamp="2025-09-29 21:25:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:46.483726821 +0000 UTC m=+144.460839492" watchObservedRunningTime="2025-09-29 21:27:46.487413253 +0000 UTC m=+144.464525924" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.553115 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:46 crc kubenswrapper[4911]: E0929 21:27:46.553303 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:47.053246454 +0000 UTC m=+145.030359125 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.554279 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8" podStartSLOduration=123.554258334 podStartE2EDuration="2m3.554258334s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:46.514373058 +0000 UTC m=+144.491485749" watchObservedRunningTime="2025-09-29 21:27:46.554258334 +0000 UTC m=+144.531371015" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.555877 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:46 crc kubenswrapper[4911]: E0929 21:27:46.557167 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:47.0567838 +0000 UTC m=+145.033896471 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.594600 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-q2dkc" podStartSLOduration=5.594575493 podStartE2EDuration="5.594575493s" podCreationTimestamp="2025-09-29 21:27:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:46.557799231 +0000 UTC m=+144.534911902" watchObservedRunningTime="2025-09-29 21:27:46.594575493 +0000 UTC m=+144.571688164" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.636883 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-dxnj8" podStartSLOduration=123.636855682 podStartE2EDuration="2m3.636855682s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:46.636402057 +0000 UTC m=+144.613514738" watchObservedRunningTime="2025-09-29 21:27:46.636855682 +0000 UTC m=+144.613968353" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.637872 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-gx2fb" podStartSLOduration=123.637864542 podStartE2EDuration="2m3.637864542s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:46.59647498 +0000 UTC m=+144.573587651" watchObservedRunningTime="2025-09-29 21:27:46.637864542 +0000 UTC m=+144.614977213" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.656737 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:46 crc kubenswrapper[4911]: E0929 21:27:46.657577 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:47.157556327 +0000 UTC m=+145.134668998 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.737469 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj" podStartSLOduration=123.737440763 podStartE2EDuration="2m3.737440763s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:46.715669234 +0000 UTC m=+144.692781905" watchObservedRunningTime="2025-09-29 21:27:46.737440763 +0000 UTC m=+144.714553424" Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.760527 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:46 crc kubenswrapper[4911]: E0929 21:27:46.761096 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:47.261077938 +0000 UTC m=+145.238190609 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.867384 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:46 crc kubenswrapper[4911]: E0929 21:27:46.867871 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:47.367847487 +0000 UTC m=+145.344960158 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:46 crc kubenswrapper[4911]: I0929 21:27:46.968948 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:46 crc kubenswrapper[4911]: E0929 21:27:46.969627 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:47.469613864 +0000 UTC m=+145.446726535 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.071190 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:47 crc kubenswrapper[4911]: E0929 21:27:47.071393 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:47.57133951 +0000 UTC m=+145.548452181 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.071981 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:47 crc kubenswrapper[4911]: E0929 21:27:47.072364 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:47.572349741 +0000 UTC m=+145.549462412 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.074201 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw" event={"ID":"db0c906b-7d60-4048-ad1d-e9765282348f","Type":"ContainerStarted","Data":"2adea4e4b028df8cfcaa3dba9b3659f399deefdbf277408318abf2b1dabb8672"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.080804 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5" event={"ID":"639123e9-fc56-4554-a59a-0d2c3866c340","Type":"ContainerStarted","Data":"e30b9935fb962bd1160274e68b7222f69fc4a7f33176f7cbb40a0ed8235edddd"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.080864 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5" event={"ID":"639123e9-fc56-4554-a59a-0d2c3866c340","Type":"ContainerStarted","Data":"8e5dec87af57930128d9b184359f129dc040cb63c5bd0492a0a0c02141acc197"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.084946 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" event={"ID":"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf","Type":"ContainerStarted","Data":"4338ac5cad4c96f3f2896e8f12a7c11398b9594037fa7d204caf3949624f5c8e"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.107007 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rlm59" event={"ID":"d4fc0a96-1ce0-4e5d-a743-3256db5295bb","Type":"ContainerStarted","Data":"0d57cdc505d3513eeb8fe77e984ac8704eac241f08d635219a2d6805bd3bf0bb"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.107059 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rlm59" event={"ID":"d4fc0a96-1ce0-4e5d-a743-3256db5295bb","Type":"ContainerStarted","Data":"f25e30a1cef4fad2fc98ea635065d50cef4ffcd419bdbab393ff405f83c27a76"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.111000 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-2pbvr" event={"ID":"5d7dea83-ad4f-4f99-a281-1dcec6929b25","Type":"ContainerStarted","Data":"9e96247afe8e1cf4b090869d4529c58cc1a9e2d33777336b48c823085b3f492f"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.117765 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" event={"ID":"615ffb8b-fb38-488c-b326-df6086017073","Type":"ContainerStarted","Data":"6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.117826 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" event={"ID":"615ffb8b-fb38-488c-b326-df6086017073","Type":"ContainerStarted","Data":"f1e6abefb7fef8bb5582277656c9d95b0d3e3137b030efb3697a5d9d1c697388"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.118575 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.123839 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs" event={"ID":"cce1e199-e6e7-41e1-b1b6-83f8d435c80e","Type":"ContainerStarted","Data":"2d4b093bcf67f6ceff8d6d668cd4c0f8deb65b36b4834f6994906c27cfe16480"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.128784 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-q2dkc" event={"ID":"bb0f6f96-6c75-46c0-9a5e-78310761816e","Type":"ContainerStarted","Data":"62f1432e899b57d5bb7fd45e1ef33ab133139425d76e0708962a5800b9d50ada"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.133696 4911 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-s8dhg container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.133758 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" podUID="615ffb8b-fb38-488c-b326-df6086017073" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.138991 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" event={"ID":"cd6c061e-79d1-4353-b077-69cb656a8823","Type":"ContainerStarted","Data":"0ab6811d329adb834df65ab0c933045afb64dce76d653334bc2d3b9ec92a4b14"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.139102 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.142044 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg" event={"ID":"9da83d12-76d2-4194-a344-a1453b536a27","Type":"ContainerStarted","Data":"dfcce5c36e89f6f3458593a7a97aa381f5eb1987df5f48d51268cfd978772ee1"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.146841 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6" event={"ID":"d3eae054-c8ac-4d16-a058-8a724a8a6d0f","Type":"ContainerStarted","Data":"6e2d4d5aef3e1ce93ede294132696d948587e9562e077d4ee6331fb77ef53e8a"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.146949 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6" event={"ID":"d3eae054-c8ac-4d16-a058-8a724a8a6d0f","Type":"ContainerStarted","Data":"e37face20374e06339a2ff9976d2e89518fa14ac14cd43d69076e6e13205a7be"} Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.156841 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" podStartSLOduration=125.156821385 podStartE2EDuration="2m5.156821385s" podCreationTimestamp="2025-09-29 21:25:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.117131135 +0000 UTC m=+145.094243806" watchObservedRunningTime="2025-09-29 21:27:47.156821385 +0000 UTC m=+145.133934056" Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.170370 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rlm59" podStartSLOduration=124.170356244 podStartE2EDuration="2m4.170356244s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.156438534 +0000 UTC m=+145.133551205" watchObservedRunningTime="2025-09-29 21:27:47.170356244 +0000 UTC m=+145.147468915" Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.177574 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:47 crc kubenswrapper[4911]: E0929 21:27:47.180959 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:47.680918204 +0000 UTC m=+145.658030875 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.198008 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" podStartSLOduration=124.197984799 podStartE2EDuration="2m4.197984799s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.183282805 +0000 UTC m=+145.160395486" watchObservedRunningTime="2025-09-29 21:27:47.197984799 +0000 UTC m=+145.175097470"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.217681 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k7lgs" podStartSLOduration=124.217645625 podStartE2EDuration="2m4.217645625s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.214356015 +0000 UTC m=+145.191468696" watchObservedRunningTime="2025-09-29 21:27:47.217645625 +0000 UTC m=+145.194758296"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.230806 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-cqtn6" event={"ID":"11c18a96-1e89-47a1-bae8-7184c53da82f","Type":"ContainerStarted","Data":"3ed6ae24335ddecb3c538504bfa92d55bb9882e4807d55a7757b601647481746"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.254467 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mmgf6" podStartSLOduration=124.254438467 podStartE2EDuration="2m4.254438467s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.248429245 +0000 UTC m=+145.225541936" watchObservedRunningTime="2025-09-29 21:27:47.254438467 +0000 UTC m=+145.231551148"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.275673 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx" event={"ID":"3636d93a-cce9-4086-9cdc-b907988e1ff8","Type":"ContainerStarted","Data":"25c311c82eec9dd0acf7645e99ebef4a3610044e999fb96110b53aadd7e6a72e"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.281383 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:47 crc kubenswrapper[4911]: E0929 21:27:47.283718 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:47.783688021 +0000 UTC m=+145.760800692 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.320003 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" podStartSLOduration=124.319977799 podStartE2EDuration="2m4.319977799s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.270341158 +0000 UTC m=+145.247453839" watchObservedRunningTime="2025-09-29 21:27:47.319977799 +0000 UTC m=+145.297090470"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.320748 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-cqtn6" podStartSLOduration=6.320742052 podStartE2EDuration="6.320742052s" podCreationTimestamp="2025-09-29 21:27:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.318762513 +0000 UTC m=+145.295875184" watchObservedRunningTime="2025-09-29 21:27:47.320742052 +0000 UTC m=+145.297854723"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.331048 4911 patch_prober.go:28] interesting pod/router-default-5444994796-gx2fb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 21:27:47 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld
Sep 29 21:27:47 crc kubenswrapper[4911]: [+]process-running ok
Sep 29 21:27:47 crc kubenswrapper[4911]: healthz check failed
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.331172 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx2fb" podUID="bead4097-1138-4381-9884-93bbf059b717" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.362324 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-nwjdw" event={"ID":"21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af","Type":"ContainerStarted","Data":"6654bdcbcf52b1492695d23efe15fb9e8f9dcdb449dea2cde40faa646d13f6a6"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.369243 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-j9nwx" podStartSLOduration=124.369211478 podStartE2EDuration="2m4.369211478s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.367974681 +0000 UTC m=+145.345087352" watchObservedRunningTime="2025-09-29 21:27:47.369211478 +0000 UTC m=+145.346324149"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.384637 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:47 crc kubenswrapper[4911]: E0929 21:27:47.385966 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:47.885938884 +0000 UTC m=+145.863051555 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.390472 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5bjvj" event={"ID":"a7232476-6ca7-47a3-8a5d-2d0e26afebdc","Type":"ContainerStarted","Data":"e1f5b64f0c4fb5a1be184c3cb29d315aac8ca248c3b3b1488abd2f0cf1d07cae"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.418512 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf" event={"ID":"a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8","Type":"ContainerStarted","Data":"63bb978d8bddd3c21688e6529bf33da9da517867aae6f5d2f5bef123183944f0"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.438251 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" event={"ID":"cd47cdce-41f4-416c-9436-4c386c50eb9e","Type":"ContainerStarted","Data":"8611bf8580ace2af6afbb6c98553df67772f72b53097de3d1642fadd45422dc8"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.438988 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.460220 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2" event={"ID":"36aba054-4229-40fb-8fd4-344cd9f61a40","Type":"ContainerStarted","Data":"a269238844f859da20bc53753e20991da07c58f6f45fa4c33d927cb73a5f3ddc"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.464419 4911 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-2mk6j container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body=
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.464474 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" podUID="cd47cdce-41f4-416c-9436-4c386c50eb9e" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.486015 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.487620 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" event={"ID":"3b34b90b-583c-444d-921c-0d5fa13835d6","Type":"ContainerStarted","Data":"5633dc4d51c01805ab5607f461972ea8e9c4915d73c9a0c07802e9ca438f1bee"}
Sep 29 21:27:47 crc kubenswrapper[4911]: E0929 21:27:47.489784 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:47.989759613 +0000 UTC m=+145.966872284 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.500341 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" podStartSLOduration=124.500315583 podStartE2EDuration="2m4.500315583s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.499104086 +0000 UTC m=+145.476216767" watchObservedRunningTime="2025-09-29 21:27:47.500315583 +0000 UTC m=+145.477428264"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.501988 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf" podStartSLOduration=124.501980353 podStartE2EDuration="2m4.501980353s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.45889443 +0000 UTC m=+145.436007111" watchObservedRunningTime="2025-09-29 21:27:47.501980353 +0000 UTC m=+145.479093024"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.518901 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" event={"ID":"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb","Type":"ContainerStarted","Data":"910f6b6c0ea13c7c6bffd07a27cc97cfa8a48328a9a84bd6c695e7b40fccf904"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.524970 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d" event={"ID":"a3def338-7395-4682-81be-d3e671abf2ed","Type":"ContainerStarted","Data":"cf8fb606142820f058709af9a513fa6e2f8fb2724f13485bc5b66ee79b9a9eda"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.525028 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d" event={"ID":"a3def338-7395-4682-81be-d3e671abf2ed","Type":"ContainerStarted","Data":"7fe16c47aec0f94f6366e13144e0ea65d39ab0cd8ed4db5e081aaafc676a5f77"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.531678 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" podStartSLOduration=125.53164786 podStartE2EDuration="2m5.53164786s" podCreationTimestamp="2025-09-29 21:25:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.52704704 +0000 UTC m=+145.504159711" watchObservedRunningTime="2025-09-29 21:27:47.53164786 +0000 UTC m=+145.508760531"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.543574 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-grwjm" event={"ID":"ae2112f8-47a1-4faf-8ee6-83f96c5a3def","Type":"ContainerStarted","Data":"75ac87b380833605814e16621f410649d5c28d03440ad61346da9b9e43b93a4c"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.548281 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" event={"ID":"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78","Type":"ContainerStarted","Data":"0c10373df997e4491d6adc99d1d61dd97e2f436550c300d63e2b89e3592cccd3"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.558155 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk" event={"ID":"72e24945-9968-4fe1-acd3-84d4c0e6099f","Type":"ContainerStarted","Data":"c3feed696a7b341d51e67e2c5ad19fc83dc9ac2c2e225c8d7fc3e57af073b724"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.558231 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk" event={"ID":"72e24945-9968-4fe1-acd3-84d4c0e6099f","Type":"ContainerStarted","Data":"fefac9f470bc2ab4822702dc970728904ffe13b23afb78e7e00e0b74a715d9b2"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.559240 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.559399 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6rs7d" podStartSLOduration=124.559387259 podStartE2EDuration="2m4.559387259s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.558571474 +0000 UTC m=+145.535684165" watchObservedRunningTime="2025-09-29 21:27:47.559387259 +0000 UTC m=+145.536499950"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.576025 4911 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-8gplk container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body=
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.576112 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk" podUID="72e24945-9968-4fe1-acd3-84d4c0e6099f" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.591308 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.635703 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc" event={"ID":"9323b64c-27c9-45cc-92bf-520f640e5126","Type":"ContainerStarted","Data":"f2d5219b1de697dedf88572ad93693e4a07274867a0bf4e9bedf21abc0877af4"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.636292 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.639872 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-mrbmd" event={"ID":"7362e2ef-cab6-4891-bb59-a7969b6e72b6","Type":"ContainerStarted","Data":"fe94ea18afa6e9cbf9dd78ac98d8b01e104794cc6a56c749cb16ab7e00fab7f1"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.639938 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-mrbmd" event={"ID":"7362e2ef-cab6-4891-bb59-a7969b6e72b6","Type":"ContainerStarted","Data":"4d7138066a9b85851f784ead5b936c8d7f2d9d9f96c6d0e18dcc6885e10b9979"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.640803 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-mrbmd"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.642345 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-dl4br" event={"ID":"79387310-5596-4b5f-af33-0f6b8a9a40ff","Type":"ContainerStarted","Data":"cd2f2eeae2659498d4fa32843c1ed660fe427aac9e276a81508fa32f40ca5b94"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.645465 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl" event={"ID":"49f9ff74-e395-4c2a-b467-c248ab4ae4bb","Type":"ContainerStarted","Data":"4d4f3bf3961c8ff5e043a827c40734e52404081bc15c91d30df470274d70b365"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.680381 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb" event={"ID":"d3db165e-e40f-4731-9bc8-518b8bf79f14","Type":"ContainerStarted","Data":"14772fb03dfcb97128f27dcfbf7dd8456476881ea91022a68514cf58404fe80d"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.680451 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb" event={"ID":"d3db165e-e40f-4731-9bc8-518b8bf79f14","Type":"ContainerStarted","Data":"bfede2291b045e09d6190ade4bf71c334472465a65dbc2f52426db6d3973bde8"}
Sep 29 21:27:47 crc kubenswrapper[4911]: E0929 21:27:47.683762 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:48.183730569 +0000 UTC m=+146.160843240 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.689523 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk" podStartSLOduration=124.689502493 podStartE2EDuration="2m4.689502493s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.677149149 +0000 UTC m=+145.654261840" watchObservedRunningTime="2025-09-29 21:27:47.689502493 +0000 UTC m=+145.666615154"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.718071 4911 patch_prober.go:28] interesting pod/downloads-7954f5f757-mrbmd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.718180 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-mrbmd" podUID="7362e2ef-cab6-4891-bb59-a7969b6e72b6" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.747343 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd" event={"ID":"3b6e2fa8-d098-4250-b417-c577a01c5975","Type":"ContainerStarted","Data":"3be0b04174f9fe9169b6ddeaa8ea171e6cba84aa27918b609d138ab9cb32a1e6"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.769353 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-dn2nd"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.770023 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:47 crc kubenswrapper[4911]: E0929 21:27:47.776425 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:48.276406911 +0000 UTC m=+146.253519582 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.780419 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz" event={"ID":"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9","Type":"ContainerStarted","Data":"457198aa3f78f8268fdd008d1eb12d74a969a18d7c949aa5e8cffb8b07a9bc44"}
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.811644 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-mrbmd" podStartSLOduration=124.811620696 podStartE2EDuration="2m4.811620696s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.793780356 +0000 UTC m=+145.770893027" watchObservedRunningTime="2025-09-29 21:27:47.811620696 +0000 UTC m=+145.788733377"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.822506 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.829387 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.874545 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:47 crc kubenswrapper[4911]: E0929 21:27:47.876112 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:48.376091526 +0000 UTC m=+146.353204197 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.903279 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-f7vrh"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.904698 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-s62fl" podStartSLOduration=124.90468682 podStartE2EDuration="2m4.90468682s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.856545704 +0000 UTC m=+145.833658395" watchObservedRunningTime="2025-09-29 21:27:47.90468682 +0000 UTC m=+145.881799491"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.933595 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc" podStartSLOduration=124.933571794 podStartE2EDuration="2m4.933571794s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.906280298 +0000 UTC m=+145.883392969" watchObservedRunningTime="2025-09-29 21:27:47.933571794 +0000 UTC m=+145.910684465"
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.979832 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:47 crc kubenswrapper[4911]: E0929 21:27:47.980274 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:48.480258426 +0000 UTC m=+146.457371097 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:47 crc kubenswrapper[4911]: I0929 21:27:47.983084 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-dl4br" podStartSLOduration=124.98302612 podStartE2EDuration="2m4.98302612s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:47.93607634 +0000 UTC m=+145.913189011" watchObservedRunningTime="2025-09-29 21:27:47.98302612 +0000 UTC m=+145.960138791"
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.084457 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.084953 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:48.58493411 +0000 UTC m=+146.562046781 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.112359 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wslbb" podStartSLOduration=125.112335519 podStartE2EDuration="2m5.112335519s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:48.111025669 +0000 UTC m=+146.088138340" watchObservedRunningTime="2025-09-29 21:27:48.112335519 +0000 UTC m=+146.089448190"
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.186761 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.187168 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:48.687155562 +0000 UTC m=+146.664268233 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.280600 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pjbd8"
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.288469 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.288651 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:48.78862978 +0000 UTC m=+146.765742441 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.288776 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.289221 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:48.789207958 +0000 UTC m=+146.766320629 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.308030 4911 patch_prober.go:28] interesting pod/router-default-5444994796-gx2fb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 21:27:48 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld
Sep 29 21:27:48 crc kubenswrapper[4911]: [+]process-running ok
Sep 29 21:27:48 crc kubenswrapper[4911]: healthz check failed
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.308110 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx2fb" podUID="bead4097-1138-4381-9884-93bbf059b717" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.390447 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.390983 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:48.890940304 +0000 UTC m=+146.868052975 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.391269 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.391633 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:48.891614794 +0000 UTC m=+146.868727465 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.492994 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.493213 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:48.993171736 +0000 UTC m=+146.970284417 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.493351 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.493778 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:48.993761463 +0000 UTC m=+146.970874134 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.594578 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.594900 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.09485518 +0000 UTC m=+147.071967851 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.594988 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.595403 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.095381857 +0000 UTC m=+147.072494578 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.696808 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.696967 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.196933367 +0000 UTC m=+147.174046038 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.697142 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.697605 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.197586267 +0000 UTC m=+147.174698938 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.785889 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-grwjm" event={"ID":"ae2112f8-47a1-4faf-8ee6-83f96c5a3def","Type":"ContainerStarted","Data":"7fd57e5df56d69dfd8ade58d6bab93740b5b6199edc820cdb4c792b5055e98cc"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.789622 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" event={"ID":"3f8faa64-f7f4-4ff5-a016-bb927dd0ec78","Type":"ContainerStarted","Data":"db260e5b237a4990779b420cd748fafdaa771b6fbf9427a82263aab5bcca2ca9"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.792051 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rd6cq" event={"ID":"3b34b90b-583c-444d-921c-0d5fa13835d6","Type":"ContainerStarted","Data":"f650a19bb4cf1d15f9ce5b6017c3d7f75514e498a5fd4fb921c65ef50ae17ed8"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.795043 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-nwjdw" event={"ID":"21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af","Type":"ContainerStarted","Data":"6f3a622e57eca17d0faaf21714593df68830710826a0cf42660b23ec3d448d13"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.795110 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-nwjdw" event={"ID":"21e1d3e8-e4aa-43d0-a5c8-61d2dba4a7af","Type":"ContainerStarted","Data":"c310ff6b8cb40422fa1f64d4344affc82a7463e98be7e8e903ed60c3a1c168d6"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.800252 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.801661 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.301637263 +0000 UTC m=+147.278749934 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.802906 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg" event={"ID":"9da83d12-76d2-4194-a344-a1453b536a27","Type":"ContainerStarted","Data":"bed8b6f082f3ba1b544da4c6527d9278fd937722e7d172257e8a05e8d18eceab"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.805571 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" event={"ID":"e66f4856-1a57-441f-9701-4f61008259c6","Type":"ContainerStarted","Data":"997dba716e1ecfa1a9af2a1bf8a130aedc92c1b82736a2e1decd6def54568d57"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.807319 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw" event={"ID":"db0c906b-7d60-4048-ad1d-e9765282348f","Type":"ContainerStarted","Data":"3f377f6c108add8acff11d43678540987bee3b955bb2c83e8591477db47b17b3"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.845136 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-dl4br" event={"ID":"79387310-5596-4b5f-af33-0f6b8a9a40ff","Type":"ContainerStarted","Data":"ea902404367ae812e82499ed2d7eaa46e7607e8d309b5fc6bc930e071a2c3b27"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.857372 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ms4bf" event={"ID":"a47f0c8e-aa7c-456e-a69d-9b3f4c02bfb8","Type":"ContainerStarted","Data":"6c2ee77ab07cf8ac42833fac5ac8a6bbe8b7bc2339abab3692a98878423344f7"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.873108 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc" event={"ID":"9323b64c-27c9-45cc-92bf-520f640e5126","Type":"ContainerStarted","Data":"b8ec3ad90ffbddadc4fda66b888190a86398e338c09ef49b0f9c634410d1555f"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.885477 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-rtrxr" podStartSLOduration=126.885458098 podStartE2EDuration="2m6.885458098s" podCreationTimestamp="2025-09-29 21:25:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:48.88517666 +0000 UTC m=+146.862289331" watchObservedRunningTime="2025-09-29 21:27:48.885458098 +0000 UTC m=+146.862570789"
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.898428 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" event={"ID":"cd47cdce-41f4-416c-9436-4c386c50eb9e","Type":"ContainerStarted","Data":"bf6eb2fd3b582c5f217d8baac15ba10340748781ed425b7bc2babe5caded140f"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.899614 4911 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-2mk6j container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body=
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.899655 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" podUID="cd47cdce-41f4-416c-9436-4c386c50eb9e" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused"
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.901163 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:48 crc kubenswrapper[4911]: E0929 21:27:48.906153 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.406131794 +0000 UTC m=+147.383244465 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.922700 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2" event={"ID":"36aba054-4229-40fb-8fd4-344cd9f61a40","Type":"ContainerStarted","Data":"c12bbd6a267411bf2b53194393370b4c56c28e4759933ba78e1a7ae7c4e9670e"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.950517 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz" event={"ID":"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9","Type":"ContainerStarted","Data":"3c4bfd3d5a71021c62438d2cc92af1e9cca134a579b5d6ed9582df48d3e2c8a6"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.950598 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz" event={"ID":"8fd16ad3-3d7f-4b84-baf5-bb7a3dd07cd9","Type":"ContainerStarted","Data":"9ba2120216c3e42bc63b52cb48e141dba2347def484ff2167cbe512891dc5d7a"}
Sep 29 21:27:48 crc kubenswrapper[4911]: I0929 21:27:48.978243 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" event={"ID":"b6d3166a-4bc1-416c-b6f4-68207a1b0ccb","Type":"ContainerStarted","Data":"162717880c58a2277b2b3b77f98bc0f14fec65e78cc0167e0a6f07774da4a183"}
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.002814 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:49 crc kubenswrapper[4911]: E0929 21:27:49.004260 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.50423978 +0000 UTC m=+147.481352461 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.018526 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5" event={"ID":"639123e9-fc56-4554-a59a-0d2c3866c340","Type":"ContainerStarted","Data":"84b514a65c30053d5b8ef17abdeedd11560edaa5acedced76a1924883b678a51"}
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.044867 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zqthg" podStartSLOduration=126.044846398 podStartE2EDuration="2m6.044846398s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:48.950410443 +0000 UTC m=+146.927523144" watchObservedRunningTime="2025-09-29 21:27:49.044846398 +0000 UTC m=+147.021959069"
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.045877 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-cp2jw" podStartSLOduration=127.045870439 podStartE2EDuration="2m7.045870439s" podCreationTimestamp="2025-09-29 21:25:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:49.040320101 +0000 UTC m=+147.017432782" watchObservedRunningTime="2025-09-29 21:27:49.045870439 +0000 UTC m=+147.022983100"
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.050407 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-2pbvr" event={"ID":"5d7dea83-ad4f-4f99-a281-1dcec6929b25","Type":"ContainerStarted","Data":"1d7bd073106142788ec9e829982f24018ab507c417a649b9f2ae14a622dbd353"}
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.050460 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-2pbvr" event={"ID":"5d7dea83-ad4f-4f99-a281-1dcec6929b25","Type":"ContainerStarted","Data":"2d8fdcf76992f406b1178b2b0f39a62091195be51af67254eb3a893481908290"}
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.050476 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-2pbvr"
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.053779 4911 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-s8dhg container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body=
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.053878 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" podUID="615ffb8b-fb38-488c-b326-df6086017073" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused"
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.054841 4911 patch_prober.go:28] interesting pod/downloads-7954f5f757-mrbmd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.054879 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-mrbmd" podUID="7362e2ef-cab6-4891-bb59-a7969b6e72b6" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.064879 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8gplk"
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.130169 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:49 crc kubenswrapper[4911]: E0929 21:27:49.133905 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.63389006 +0000 UTC m=+147.611002731 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.231548 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:49 crc kubenswrapper[4911]: E0929 21:27:49.232682 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.732656297 +0000 UTC m=+147.709768968 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.312136 4911 patch_prober.go:28] interesting pod/router-default-5444994796-gx2fb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 21:27:49 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld
Sep 29 21:27:49 crc kubenswrapper[4911]: [+]process-running ok
Sep 29 21:27:49 crc kubenswrapper[4911]: healthz check failed
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.312210 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx2fb" podUID="bead4097-1138-4381-9884-93bbf059b717" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.313029 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5" podStartSLOduration=126.313011267 podStartE2EDuration="2m6.313011267s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:49.311099599 +0000 UTC m=+147.288212280" watchObservedRunningTime="2025-09-29 21:27:49.313011267 +0000 UTC m=+147.290123958"
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.313160 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-nwjdw" podStartSLOduration=126.313154981 podStartE2EDuration="2m6.313154981s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:49.198653049 +0000 UTC m=+147.175765720" watchObservedRunningTime="2025-09-29 21:27:49.313154981 +0000 UTC m=+147.290267652"
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.338866 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:49 crc kubenswrapper[4911]: E0929 21:27:49.339203 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.839189009 +0000 UTC m=+147.816301670 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.439723 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:49 crc kubenswrapper[4911]: E0929 21:27:49.440270 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:49.940252805 +0000 UTC m=+147.917365476 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.482882 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bgnq2" podStartSLOduration=126.482863933 podStartE2EDuration="2m6.482863933s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:49.482239585 +0000 UTC m=+147.459352256" watchObservedRunningTime="2025-09-29 21:27:49.482863933 +0000 UTC m=+147.459976604"
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.496378 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cz97x"]
Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.497449 4911 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.515845 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.531348 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kxz4b" podStartSLOduration=128.531307808 podStartE2EDuration="2m8.531307808s" podCreationTimestamp="2025-09-29 21:25:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:49.522129141 +0000 UTC m=+147.499241812" watchObservedRunningTime="2025-09-29 21:27:49.531307808 +0000 UTC m=+147.508420499" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.543751 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f7f281b-93f4-42fe-9996-2a5a4860ce88-catalog-content\") pod \"certified-operators-cz97x\" (UID: \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\") " pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.543864 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw74r\" (UniqueName: \"kubernetes.io/projected/4f7f281b-93f4-42fe-9996-2a5a4860ce88-kube-api-access-kw74r\") pod \"certified-operators-cz97x\" (UID: \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\") " pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.543934 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.543958 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f7f281b-93f4-42fe-9996-2a5a4860ce88-utilities\") pod \"certified-operators-cz97x\" (UID: \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\") " pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:27:49 crc kubenswrapper[4911]: E0929 21:27:49.544288 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:50.0442757 +0000 UTC m=+148.021388371 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.583611 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cz97x"] Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.646726 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.646935 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f7f281b-93f4-42fe-9996-2a5a4860ce88-catalog-content\") pod \"certified-operators-cz97x\" (UID: \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\") " pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.647002 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw74r\" (UniqueName: \"kubernetes.io/projected/4f7f281b-93f4-42fe-9996-2a5a4860ce88-kube-api-access-kw74r\") pod \"certified-operators-cz97x\" (UID: \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\") " pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.647074 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f7f281b-93f4-42fe-9996-2a5a4860ce88-utilities\") pod \"certified-operators-cz97x\" (UID: \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\") " pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.647547 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f7f281b-93f4-42fe-9996-2a5a4860ce88-utilities\") pod \"certified-operators-cz97x\" (UID: \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\") " pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:27:49 crc kubenswrapper[4911]: E0929 21:27:49.647628 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:50.147606405 +0000 UTC m=+148.124719076 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.647846 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f7f281b-93f4-42fe-9996-2a5a4860ce88-catalog-content\") pod \"certified-operators-cz97x\" (UID: \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\") " pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.666234 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5f6q5" podStartSLOduration=126.666211817 podStartE2EDuration="2m6.666211817s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:49.583420744 +0000 UTC m=+147.560533415" watchObservedRunningTime="2025-09-29 21:27:49.666211817 +0000 UTC m=+147.643324488" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.713420 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nh5bz" podStartSLOduration=126.713392844 podStartE2EDuration="2m6.713392844s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:49.675262422 +0000 UTC m=+147.652375093" watchObservedRunningTime="2025-09-29 21:27:49.713392844 +0000 UTC m=+147.690505515" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.715333 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qksj7"] Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.738246 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qksj7"] Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.738386 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.743572 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-2pbvr" podStartSLOduration=8.743542656 podStartE2EDuration="8.743542656s" podCreationTimestamp="2025-09-29 21:27:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:49.734126661 +0000 UTC m=+147.711239332" watchObservedRunningTime="2025-09-29 21:27:49.743542656 +0000 UTC m=+147.720655327" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.746550 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.750552 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:49 crc kubenswrapper[4911]: E0929 21:27:49.750943 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:50.250930749 +0000 UTC m=+148.228043420 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.752514 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw74r\" (UniqueName: \"kubernetes.io/projected/4f7f281b-93f4-42fe-9996-2a5a4860ce88-kube-api-access-kw74r\") pod \"certified-operators-cz97x\" (UID: \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\") " pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.848235 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.852388 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.852846 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-catalog-content\") pod \"community-operators-qksj7\" (UID: \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\") " pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.852884 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl2l9\" (UniqueName: \"kubernetes.io/projected/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-kube-api-access-pl2l9\") pod \"community-operators-qksj7\" (UID: \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\") " pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.852939 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-utilities\") pod \"community-operators-qksj7\" (UID: \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\") " pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:27:49 crc kubenswrapper[4911]: E0929 21:27:49.853084 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:50.353060818 +0000 UTC m=+148.330173489 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.936503 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tlx2w"] Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.942023 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.955566 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.955621 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-catalog-content\") pod \"community-operators-qksj7\" (UID: \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\") " pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.955648 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl2l9\" (UniqueName: \"kubernetes.io/projected/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-kube-api-access-pl2l9\") pod \"community-operators-qksj7\" (UID: \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\") " pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.955691 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.955714 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.955739 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-utilities\") pod \"community-operators-qksj7\" (UID: \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\") " pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.955762 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.955816 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.959323 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-utilities\") pod \"community-operators-qksj7\" (UID: \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\") " pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.959462 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tlx2w"] Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.960004 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-catalog-content\") pod \"community-operators-qksj7\" (UID: \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\") " pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:27:49 crc kubenswrapper[4911]: E0929 21:27:49.962751 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:50.462730644 +0000 UTC m=+148.439843475 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.964242 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.970613 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.973402 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:49 crc kubenswrapper[4911]: I0929 21:27:49.993087 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.031350 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.036460 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl2l9\" (UniqueName: \"kubernetes.io/projected/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-kube-api-access-pl2l9\") pod \"community-operators-qksj7\" (UID: \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\") " pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.052996 4911 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-c5pgg container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.053067 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" podUID="cd6c061e-79d1-4353-b077-69cb656a8823" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.053206 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.058529 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.058727 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-catalog-content\") pod \"certified-operators-tlx2w\" (UID: \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\") " pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.058766 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7chz6\" (UniqueName: \"kubernetes.io/projected/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-kube-api-access-7chz6\") pod \"certified-operators-tlx2w\" (UID: \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\") " pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.058820 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-utilities\") pod \"certified-operators-tlx2w\" (UID: \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\") " pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:27:50 crc kubenswrapper[4911]: E0929 21:27:50.059000 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-09-29 21:27:50.558979214 +0000 UTC m=+148.536091885 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.075140 4911 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-s8dhg container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.075200 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" podUID="615ffb8b-fb38-488c-b326-df6086017073" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.090702 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5kc9d"] Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.092750 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.093434 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.122413 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5kc9d"] Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.160492 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.160947 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8f3e163-1816-4969-8594-c1a11760793a-catalog-content\") pod \"community-operators-5kc9d\" (UID: \"f8f3e163-1816-4969-8594-c1a11760793a\") " pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.161138 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44txz\" (UniqueName: \"kubernetes.io/projected/f8f3e163-1816-4969-8594-c1a11760793a-kube-api-access-44txz\") pod \"community-operators-5kc9d\" (UID: \"f8f3e163-1816-4969-8594-c1a11760793a\") " pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.161179 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-catalog-content\") pod \"certified-operators-tlx2w\" (UID: \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\") " pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.161262 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7chz6\" (UniqueName: \"kubernetes.io/projected/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-kube-api-access-7chz6\") pod \"certified-operators-tlx2w\" (UID: \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\") " pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.161428 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8f3e163-1816-4969-8594-c1a11760793a-utilities\") pod \"community-operators-5kc9d\" (UID: \"f8f3e163-1816-4969-8594-c1a11760793a\") " pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.161453 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-utilities\") pod \"certified-operators-tlx2w\" (UID: \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\") " pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:27:50 crc kubenswrapper[4911]: E0929 21:27:50.162766 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:50.662748782 +0000 UTC m=+148.639861453 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.169595 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-catalog-content\") pod \"certified-operators-tlx2w\" (UID: \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\") " pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.171922 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.191675 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-utilities\") pod \"certified-operators-tlx2w\" (UID: \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\") " pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.231771 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.274496 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7chz6\" (UniqueName: \"kubernetes.io/projected/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-kube-api-access-7chz6\") pod \"certified-operators-tlx2w\" (UID: \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\") " pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.289983 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:50 crc kubenswrapper[4911]: E0929 21:27:50.292848 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:50.792766254 +0000 UTC m=+148.769878925 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.293037 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8f3e163-1816-4969-8594-c1a11760793a-utilities\") pod \"community-operators-5kc9d\" (UID: \"f8f3e163-1816-4969-8594-c1a11760793a\") " pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.293132 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.293208 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8f3e163-1816-4969-8594-c1a11760793a-catalog-content\") pod \"community-operators-5kc9d\" (UID: \"f8f3e163-1816-4969-8594-c1a11760793a\") " pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.293710 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8f3e163-1816-4969-8594-c1a11760793a-catalog-content\") pod \"community-operators-5kc9d\" (UID: \"f8f3e163-1816-4969-8594-c1a11760793a\") " pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:27:50 crc kubenswrapper[4911]: E0929 21:27:50.295358 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:50.795324942 +0000 UTC m=+148.772437613 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.298690 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8f3e163-1816-4969-8594-c1a11760793a-utilities\") pod \"community-operators-5kc9d\" (UID: \"f8f3e163-1816-4969-8594-c1a11760793a\") " pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.312156 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.331627 4911 patch_prober.go:28] interesting pod/router-default-5444994796-gx2fb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 21:27:50 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld Sep 29 21:27:50 crc kubenswrapper[4911]: [+]process-running ok Sep 29 21:27:50 crc kubenswrapper[4911]: healthz check failed Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.331723 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx2fb" podUID="bead4097-1138-4381-9884-93bbf059b717" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.400503 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.400888 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44txz\" (UniqueName: \"kubernetes.io/projected/f8f3e163-1816-4969-8594-c1a11760793a-kube-api-access-44txz\") pod \"community-operators-5kc9d\" (UID: \"f8f3e163-1816-4969-8594-c1a11760793a\") " pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:27:50 crc kubenswrapper[4911]: E0929 21:27:50.401308 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:50.901293506 +0000 UTC m=+148.878406177 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.493367 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c5pgg" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.501957 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:50 crc kubenswrapper[4911]: E0929 21:27:50.502297 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-09-29 21:27:51.00228373 +0000 UTC m=+148.979396391 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.579021 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44txz\" (UniqueName: \"kubernetes.io/projected/f8f3e163-1816-4969-8594-c1a11760793a-kube-api-access-44txz\") pod \"community-operators-5kc9d\" (UID: \"f8f3e163-1816-4969-8594-c1a11760793a\") " pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.612384 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:50 crc kubenswrapper[4911]: E0929 21:27:50.613918 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:51.113894295 +0000 UTC m=+149.091006956 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.716992 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:50 crc kubenswrapper[4911]: E0929 21:27:50.717430 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:51.217418185 +0000 UTC m=+149.194530856 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.730497 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.820462 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:50 crc kubenswrapper[4911]: E0929 21:27:50.820874 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:51.320857274 +0000 UTC m=+149.297969945 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:50 crc kubenswrapper[4911]: I0929 21:27:50.922724 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:50 crc kubenswrapper[4911]: E0929 21:27:50.923121 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:51.423108265 +0000 UTC m=+149.400220936 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.025330 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 21:27:51 crc kubenswrapper[4911]: E0929 21:27:51.025956 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:51.525923735 +0000 UTC m=+149.503036406 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.026005 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:27:51 crc kubenswrapper[4911]: E0929 21:27:51.026391 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:51.526376718 +0000 UTC m=+149.503489389 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.069966 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cz97x"]
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.119915 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-grwjm" event={"ID":"ae2112f8-47a1-4faf-8ee6-83f96c5a3def","Type":"ContainerStarted","Data":"838a0503d03c400265e2658aa479867fd607b7fb588d2d30b131f538ea90db8c"}
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.128330 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:51 crc kubenswrapper[4911]: E0929 21:27:51.128648 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:51.628632821 +0000 UTC m=+149.605745492 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.230919 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:51 crc kubenswrapper[4911]: E0929 21:27:51.232292 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:51.732265214 +0000 UTC m=+149.709377885 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.332410 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:51 crc kubenswrapper[4911]: E0929 21:27:51.333265 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:51.833231528 +0000 UTC m=+149.810344199 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.340890 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.341840 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.343581 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.349478 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.349711 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.428046 4911 patch_prober.go:28] interesting pod/router-default-5444994796-gx2fb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 21:27:51 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld
Sep 29 21:27:51 crc kubenswrapper[4911]: [+]process-running ok
Sep 29 21:27:51 crc kubenswrapper[4911]: healthz check failed
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.428099 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx2fb" podUID="bead4097-1138-4381-9884-93bbf059b717" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.434523 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:51 crc kubenswrapper[4911]: E0929 21:27:51.434991 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:51.934975164 +0000 UTC m=+149.912087835 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.478861 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hxt4d"]
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.480000 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hxt4d"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.495551 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.504210 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxt4d"]
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.536640 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.536937 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/587286a6-f2ab-423c-8569-9f9d2b103edf-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"587286a6-f2ab-423c-8569-9f9d2b103edf\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.536980 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/587286a6-f2ab-423c-8569-9f9d2b103edf-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"587286a6-f2ab-423c-8569-9f9d2b103edf\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 21:27:51 crc kubenswrapper[4911]: E0929 21:27:51.537123 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:52.037105882 +0000 UTC m=+150.014218553 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.638859 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.639218 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596b137a-5101-4a31-85b8-050945c1de9b-utilities\") pod \"redhat-marketplace-hxt4d\" (UID: \"596b137a-5101-4a31-85b8-050945c1de9b\") " pod="openshift-marketplace/redhat-marketplace-hxt4d"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.639263 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/587286a6-f2ab-423c-8569-9f9d2b103edf-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"587286a6-f2ab-423c-8569-9f9d2b103edf\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.639290 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596b137a-5101-4a31-85b8-050945c1de9b-catalog-content\") pod \"redhat-marketplace-hxt4d\" (UID: \"596b137a-5101-4a31-85b8-050945c1de9b\") " pod="openshift-marketplace/redhat-marketplace-hxt4d"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.639316 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/587286a6-f2ab-423c-8569-9f9d2b103edf-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"587286a6-f2ab-423c-8569-9f9d2b103edf\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.639335 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkznl\" (UniqueName: \"kubernetes.io/projected/596b137a-5101-4a31-85b8-050945c1de9b-kube-api-access-qkznl\") pod \"redhat-marketplace-hxt4d\" (UID: \"596b137a-5101-4a31-85b8-050945c1de9b\") " pod="openshift-marketplace/redhat-marketplace-hxt4d"
Sep 29 21:27:51 crc kubenswrapper[4911]: E0929 21:27:51.639701 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:52.139686284 +0000 UTC m=+150.116798955 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.639744 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/587286a6-f2ab-423c-8569-9f9d2b103edf-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"587286a6-f2ab-423c-8569-9f9d2b103edf\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.687838 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/587286a6-f2ab-423c-8569-9f9d2b103edf-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"587286a6-f2ab-423c-8569-9f9d2b103edf\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.699462 4911 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.714568 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tlx2w"]
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.750566 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.750946 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596b137a-5101-4a31-85b8-050945c1de9b-catalog-content\") pod \"redhat-marketplace-hxt4d\" (UID: \"596b137a-5101-4a31-85b8-050945c1de9b\") " pod="openshift-marketplace/redhat-marketplace-hxt4d"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.750988 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkznl\" (UniqueName: \"kubernetes.io/projected/596b137a-5101-4a31-85b8-050945c1de9b-kube-api-access-qkznl\") pod \"redhat-marketplace-hxt4d\" (UID: \"596b137a-5101-4a31-85b8-050945c1de9b\") " pod="openshift-marketplace/redhat-marketplace-hxt4d"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.751044 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596b137a-5101-4a31-85b8-050945c1de9b-utilities\") pod \"redhat-marketplace-hxt4d\" (UID: \"596b137a-5101-4a31-85b8-050945c1de9b\") " pod="openshift-marketplace/redhat-marketplace-hxt4d"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.751506 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596b137a-5101-4a31-85b8-050945c1de9b-utilities\") pod \"redhat-marketplace-hxt4d\" (UID: \"596b137a-5101-4a31-85b8-050945c1de9b\") " pod="openshift-marketplace/redhat-marketplace-hxt4d"
Sep 29 21:27:51 crc kubenswrapper[4911]: E0929 21:27:51.751597 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:52.251577048 +0000 UTC m=+150.228689719 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.752275 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596b137a-5101-4a31-85b8-050945c1de9b-catalog-content\") pod \"redhat-marketplace-hxt4d\" (UID: \"596b137a-5101-4a31-85b8-050945c1de9b\") " pod="openshift-marketplace/redhat-marketplace-hxt4d"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.783230 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.790647 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkznl\" (UniqueName: \"kubernetes.io/projected/596b137a-5101-4a31-85b8-050945c1de9b-kube-api-access-qkznl\") pod \"redhat-marketplace-hxt4d\" (UID: \"596b137a-5101-4a31-85b8-050945c1de9b\") " pod="openshift-marketplace/redhat-marketplace-hxt4d"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.804719 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qksj7"]
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.860583 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.861137 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hxt4d"
Sep 29 21:27:51 crc kubenswrapper[4911]: E0929 21:27:51.862267 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 21:27:52.362243514 +0000 UTC m=+150.339356185 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8djg4" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.898506 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8xxks"]
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.902494 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.907900 4911 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-29T21:27:51.699492733Z","Handler":null,"Name":""}
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.923332 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8xxks"]
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.970718 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:51 crc kubenswrapper[4911]: E0929 21:27:51.971187 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 21:27:52.471164728 +0000 UTC m=+150.448277399 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.987151 4911 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Sep 29 21:27:51 crc kubenswrapper[4911]: I0929 21:27:51.987203 4911 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.078109 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5be9e2b9-d97c-40c1-9179-2703b878679b-catalog-content\") pod \"redhat-marketplace-8xxks\" (UID: \"5be9e2b9-d97c-40c1-9179-2703b878679b\") " pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.078191 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jn4k\" (UniqueName: \"kubernetes.io/projected/5be9e2b9-d97c-40c1-9179-2703b878679b-kube-api-access-6jn4k\") pod \"redhat-marketplace-8xxks\" (UID: \"5be9e2b9-d97c-40c1-9179-2703b878679b\") " pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.078259 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.078280 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5be9e2b9-d97c-40c1-9179-2703b878679b-utilities\") pod \"redhat-marketplace-8xxks\" (UID: \"5be9e2b9-d97c-40c1-9179-2703b878679b\") " pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.092041 4911 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.092103 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.177456 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5kc9d"]
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.198920 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"5ed1603efa7b6ea93befd303356acd9f2be3ff46fde46cb133be89a7311c6e82"}
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.198963 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"642bcad80c3d3fb9acb884a90f69888e7ba3f01e0180588d7eb173444cdb5035"}
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.181222 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5be9e2b9-d97c-40c1-9179-2703b878679b-catalog-content\") pod \"redhat-marketplace-8xxks\" (UID: \"5be9e2b9-d97c-40c1-9179-2703b878679b\") " pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.200317 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jn4k\" (UniqueName: \"kubernetes.io/projected/5be9e2b9-d97c-40c1-9179-2703b878679b-kube-api-access-6jn4k\") pod \"redhat-marketplace-8xxks\" (UID: \"5be9e2b9-d97c-40c1-9179-2703b878679b\") " pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.200420 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5be9e2b9-d97c-40c1-9179-2703b878679b-utilities\") pod \"redhat-marketplace-8xxks\" (UID: \"5be9e2b9-d97c-40c1-9179-2703b878679b\") " pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.200906 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5be9e2b9-d97c-40c1-9179-2703b878679b-utilities\") pod \"redhat-marketplace-8xxks\" (UID: \"5be9e2b9-d97c-40c1-9179-2703b878679b\") " pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.181614 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5be9e2b9-d97c-40c1-9179-2703b878679b-catalog-content\") pod \"redhat-marketplace-8xxks\" (UID: \"5be9e2b9-d97c-40c1-9179-2703b878679b\") " pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.200972 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.236653 4911 generic.go:334] "Generic (PLEG): container finished" podID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" containerID="c9bc50e5782112b86ed302a0fb42c047cb203268f58b6c3d66d93deb5e6fe3d5" exitCode=0
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.236808 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cz97x" event={"ID":"4f7f281b-93f4-42fe-9996-2a5a4860ce88","Type":"ContainerDied","Data":"c9bc50e5782112b86ed302a0fb42c047cb203268f58b6c3d66d93deb5e6fe3d5"}
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.236847 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cz97x" event={"ID":"4f7f281b-93f4-42fe-9996-2a5a4860ce88","Type":"ContainerStarted","Data":"2676ad159071d6440f5ab5bd28fef5363c3beaf07610358d1d623a5ace283453"}
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.243672 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8djg4\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.274899 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-grwjm" event={"ID":"ae2112f8-47a1-4faf-8ee6-83f96c5a3def","Type":"ContainerStarted","Data":"2ce851dcb915a981cf644ef6071995b41f3a0f6dd302f8d03ba72807d733c43f"}
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.275938 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jn4k\" (UniqueName: \"kubernetes.io/projected/5be9e2b9-d97c-40c1-9179-2703b878679b-kube-api-access-6jn4k\") pod \"redhat-marketplace-8xxks\" (UID: \"5be9e2b9-d97c-40c1-9179-2703b878679b\") " pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.276283 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.288357 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qksj7" event={"ID":"2f3bc61e-17ba-4f89-b582-1d4efd6d7146","Type":"ContainerStarted","Data":"571dc35a825eefea0984d93784f7ebc9cb512e0a4915c2a8ecd39eaccd1f6428"}
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.301765 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.324252 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tlx2w" event={"ID":"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e","Type":"ContainerStarted","Data":"4f00b5b3656cf8886c0fca8b24c259a030805d214a2e8428c3967bc06fdb067d"}
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.328292 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.338281 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.355157 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b7ed0f02f056ce2a3891bfe794cf248e2ce040a7089ba6cd8f975123cbe3afe9"}
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.355222 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"8ef8d011266a53cdd2f83442ecef0ed09f63b31bd504f987608323976f10bbdc"}
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.443133 4911 patch_prober.go:28] interesting pod/router-default-5444994796-gx2fb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 21:27:52 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld
Sep 29 21:27:52 crc kubenswrapper[4911]: [+]process-running ok
Sep 29 21:27:52 crc kubenswrapper[4911]: healthz check failed
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.443234 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx2fb" podUID="bead4097-1138-4381-9884-93bbf059b717" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.519515 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.640545 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxt4d"]
Sep 29 21:27:52 crc kubenswrapper[4911]: W0929 21:27:52.652017 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod596b137a_5101_4a31_85b8_050945c1de9b.slice/crio-122a95699c8b7ad6c0a4f25d49c25b157055a28a10108321f27866606cfe29a2 WatchSource:0}: Error finding container 122a95699c8b7ad6c0a4f25d49c25b157055a28a10108321f27866606cfe29a2: Status 404 returned error can't find the container with id 122a95699c8b7ad6c0a4f25d49c25b157055a28a10108321f27866606cfe29a2
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.674130 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m9d9l"]
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.676321 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m9d9l"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.676976 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m9d9l"]
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.680048 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.696492 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.756743 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.759250 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd54ef5c-5d59-4326-aba3-ba1a915313c9-utilities\") pod \"redhat-operators-m9d9l\" (UID: \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\") " pod="openshift-marketplace/redhat-operators-m9d9l"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.759402 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p4v9\" (UniqueName: \"kubernetes.io/projected/fd54ef5c-5d59-4326-aba3-ba1a915313c9-kube-api-access-2p4v9\") pod \"redhat-operators-m9d9l\" (UID: \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\") " pod="openshift-marketplace/redhat-operators-m9d9l"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.759463 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd54ef5c-5d59-4326-aba3-ba1a915313c9-catalog-content\") pod \"redhat-operators-m9d9l\" (UID: \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\") " pod="openshift-marketplace/redhat-operators-m9d9l"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.849860 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8djg4"]
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.861662 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p4v9\" (UniqueName: \"kubernetes.io/projected/fd54ef5c-5d59-4326-aba3-ba1a915313c9-kube-api-access-2p4v9\") pod \"redhat-operators-m9d9l\" (UID: \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\") " pod="openshift-marketplace/redhat-operators-m9d9l"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.861711 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd54ef5c-5d59-4326-aba3-ba1a915313c9-catalog-content\") pod \"redhat-operators-m9d9l\" (UID: \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\") " pod="openshift-marketplace/redhat-operators-m9d9l"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.861765 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd54ef5c-5d59-4326-aba3-ba1a915313c9-utilities\") pod \"redhat-operators-m9d9l\" (UID: \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\") " pod="openshift-marketplace/redhat-operators-m9d9l"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.862209 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd54ef5c-5d59-4326-aba3-ba1a915313c9-utilities\") pod \"redhat-operators-m9d9l\" (UID: \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\") " pod="openshift-marketplace/redhat-operators-m9d9l"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.862697 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd54ef5c-5d59-4326-aba3-ba1a915313c9-catalog-content\") pod \"redhat-operators-m9d9l\" (UID: \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\") " pod="openshift-marketplace/redhat-operators-m9d9l"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.932969 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p4v9\" (UniqueName: \"kubernetes.io/projected/fd54ef5c-5d59-4326-aba3-ba1a915313c9-kube-api-access-2p4v9\") pod \"redhat-operators-m9d9l\" (UID: \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\") " pod="openshift-marketplace/redhat-operators-m9d9l"
Sep 29 21:27:52 crc kubenswrapper[4911]: I0929 21:27:52.938026 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8xxks"]
Sep 29 21:27:52 crc kubenswrapper[4911]: W0929 21:27:52.969624 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5be9e2b9_d97c_40c1_9179_2703b878679b.slice/crio-809523b63ce5f678b446777ccb64d99daa9545602c700a3fd356fb4983de3917 WatchSource:0}: Error finding container 809523b63ce5f678b446777ccb64d99daa9545602c700a3fd356fb4983de3917: Status 404 returned error can't find the container with id 809523b63ce5f678b446777ccb64d99daa9545602c700a3fd356fb4983de3917
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.053146 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7nx76"]
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.054305 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7nx76"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.069772 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7nx76"]
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.107932 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m9d9l"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.165409 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc47074-abdd-41ee-b5de-8c483fc77226-utilities\") pod \"redhat-operators-7nx76\" (UID: \"adc47074-abdd-41ee-b5de-8c483fc77226\") " pod="openshift-marketplace/redhat-operators-7nx76"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.165574 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc47074-abdd-41ee-b5de-8c483fc77226-catalog-content\") pod \"redhat-operators-7nx76\" (UID: \"adc47074-abdd-41ee-b5de-8c483fc77226\") " pod="openshift-marketplace/redhat-operators-7nx76"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.165761 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr6v5\" (UniqueName: \"kubernetes.io/projected/adc47074-abdd-41ee-b5de-8c483fc77226-kube-api-access-rr6v5\") pod \"redhat-operators-7nx76\" (UID: \"adc47074-abdd-41ee-b5de-8c483fc77226\") " pod="openshift-marketplace/redhat-operators-7nx76"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.266965 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc47074-abdd-41ee-b5de-8c483fc77226-utilities\") pod \"redhat-operators-7nx76\" (UID: \"adc47074-abdd-41ee-b5de-8c483fc77226\") " pod="openshift-marketplace/redhat-operators-7nx76"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.267405 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc47074-abdd-41ee-b5de-8c483fc77226-catalog-content\") pod \"redhat-operators-7nx76\" (UID: \"adc47074-abdd-41ee-b5de-8c483fc77226\") " pod="openshift-marketplace/redhat-operators-7nx76"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.267476 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr6v5\" (UniqueName: \"kubernetes.io/projected/adc47074-abdd-41ee-b5de-8c483fc77226-kube-api-access-rr6v5\") pod \"redhat-operators-7nx76\" (UID: \"adc47074-abdd-41ee-b5de-8c483fc77226\") " pod="openshift-marketplace/redhat-operators-7nx76"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.268219 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc47074-abdd-41ee-b5de-8c483fc77226-catalog-content\") pod \"redhat-operators-7nx76\" (UID: \"adc47074-abdd-41ee-b5de-8c483fc77226\") " pod="openshift-marketplace/redhat-operators-7nx76"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.268240 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc47074-abdd-41ee-b5de-8c483fc77226-utilities\") pod \"redhat-operators-7nx76\" (UID: \"adc47074-abdd-41ee-b5de-8c483fc77226\") " pod="openshift-marketplace/redhat-operators-7nx76"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.307475 4911 patch_prober.go:28] interesting pod/router-default-5444994796-gx2fb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 21:27:53 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld
Sep 29 21:27:53 crc kubenswrapper[4911]: [+]process-running ok
Sep 29 21:27:53 crc kubenswrapper[4911]: healthz check failed
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.307562 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx2fb" podUID="bead4097-1138-4381-9884-93bbf059b717" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.332375 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr6v5\" (UniqueName: \"kubernetes.io/projected/adc47074-abdd-41ee-b5de-8c483fc77226-kube-api-access-rr6v5\") pod \"redhat-operators-7nx76\" (UID: \"adc47074-abdd-41ee-b5de-8c483fc77226\") " pod="openshift-marketplace/redhat-operators-7nx76"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.375074 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m9d9l"]
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.383708 4911 generic.go:334] "Generic (PLEG): container finished" podID="596b137a-5101-4a31-85b8-050945c1de9b" containerID="e7a83d500e3f93100dc284099fae8792152446ece658fad56b9a50a879018cbe" exitCode=0
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.383900 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxt4d" event={"ID":"596b137a-5101-4a31-85b8-050945c1de9b","Type":"ContainerDied","Data":"e7a83d500e3f93100dc284099fae8792152446ece658fad56b9a50a879018cbe"}
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.384190 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxt4d" event={"ID":"596b137a-5101-4a31-85b8-050945c1de9b","Type":"ContainerStarted","Data":"122a95699c8b7ad6c0a4f25d49c25b157055a28a10108321f27866606cfe29a2"}
Sep 29 21:27:53 crc kubenswrapper[4911]: W0929 21:27:53.395277 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd54ef5c_5d59_4326_aba3_ba1a915313c9.slice/crio-ea96b6f6921b59594e676c818f6009053dbc857b760af8ddb16e8346a6c0b1a1 WatchSource:0}: Error finding container ea96b6f6921b59594e676c818f6009053dbc857b760af8ddb16e8346a6c0b1a1: Status 404 returned error can't find the container with id ea96b6f6921b59594e676c818f6009053dbc857b760af8ddb16e8346a6c0b1a1
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.398444 4911 generic.go:334] "Generic (PLEG): container finished" podID="f8f3e163-1816-4969-8594-c1a11760793a" containerID="f2e77eef62209aeec14c57a8c71ea55031d2e4e4884df9c2339361a508728c84" exitCode=0
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.398570 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5kc9d" event={"ID":"f8f3e163-1816-4969-8594-c1a11760793a","Type":"ContainerDied","Data":"f2e77eef62209aeec14c57a8c71ea55031d2e4e4884df9c2339361a508728c84"}
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.398615 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5kc9d" event={"ID":"f8f3e163-1816-4969-8594-c1a11760793a","Type":"ContainerStarted","Data":"62d7509a6ed605a0c4c42ce020ac38df29961a1c7285d4a193ced5fd37de8706"}
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.412911 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-szrp2"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.412967 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-szrp2"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.418701 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" event={"ID":"4357f10f-dad3-4233-9d03-1cad6319e4a9","Type":"ContainerStarted","Data":"581ca7861123268b62b37013105aecb329f541e02bdb0476461070e874dff07b"}
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.423115 4911 patch_prober.go:28] interesting pod/console-f9d7485db-szrp2 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.23:8443/health\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body=
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.423190 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-szrp2" podUID="96f91b1a-e276-4bc1-9308-5375745c803c" containerName="console" probeResult="failure" output="Get \"https://10.217.0.23:8443/health\": dial tcp 10.217.0.23:8443: connect: connection refused"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.431852 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.442076 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-grwjm" event={"ID":"ae2112f8-47a1-4faf-8ee6-83f96c5a3def","Type":"ContainerStarted","Data":"09052d3e14dd09d8e24e983b65cbc7af7f500f468b7455a3c9e279946aaa2af7"}
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.451666 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"587286a6-f2ab-423c-8569-9f9d2b103edf","Type":"ContainerStarted","Data":"45302e430d540085820844b0cf63f275520d1a6e894a8145e495de4e96ad163e"}
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.455947 4911 generic.go:334] "Generic (PLEG): container finished" podID="2ad88e09-2e13-4070-9c1f-75cb9dd12ebf" containerID="4338ac5cad4c96f3f2896e8f12a7c11398b9594037fa7d204caf3949624f5c8e" exitCode=0
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.456049 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" event={"ID":"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf","Type":"ContainerDied","Data":"4338ac5cad4c96f3f2896e8f12a7c11398b9594037fa7d204caf3949624f5c8e"}
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.458613 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8xxks" event={"ID":"5be9e2b9-d97c-40c1-9179-2703b878679b","Type":"ContainerStarted","Data":"809523b63ce5f678b446777ccb64d99daa9545602c700a3fd356fb4983de3917"}
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.469364 4911 generic.go:334] "Generic (PLEG): container finished" podID="2f3bc61e-17ba-4f89-b582-1d4efd6d7146" containerID="f6476bac94160453c95d099d6464c2395a4ae99d0b00c5efe805f5765798ff19" exitCode=0
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.469476 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qksj7" event={"ID":"2f3bc61e-17ba-4f89-b582-1d4efd6d7146","Type":"ContainerDied","Data":"f6476bac94160453c95d099d6464c2395a4ae99d0b00c5efe805f5765798ff19"}
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.477017 4911 generic.go:334] "Generic (PLEG): container finished" podID="e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" containerID="b8417bd352fe825d87d1d9a924268f11b9f5e1dc1fc29150226e433f6646ffe4" exitCode=0
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.477519 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tlx2w" event={"ID":"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e","Type":"ContainerDied","Data":"b8417bd352fe825d87d1d9a924268f11b9f5e1dc1fc29150226e433f6646ffe4"}
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.505137 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"2610e1290a81e6fe027fddedb127e38d2b2bd427f747fc001e04e116cb4143bc"}
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.505215 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ae8b139067f77abfd7efdb8e5c7efe07c4da8e581fb8f01f491b38387678a4e1"}
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.525918 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7nx76"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.533260 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-rtrxr"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.533311 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-rtrxr"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.539936 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" podStartSLOduration=130.539901226 podStartE2EDuration="2m10.539901226s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:53.474691734 +0000 UTC m=+151.451804415" watchObservedRunningTime="2025-09-29 21:27:53.539901226 +0000 UTC m=+151.517013897"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.540539 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-grwjm" podStartSLOduration=12.540532084 podStartE2EDuration="12.540532084s" podCreationTimestamp="2025-09-29 21:27:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:53.525418607 +0000 UTC m=+151.502531278" watchObservedRunningTime="2025-09-29 21:27:53.540532084 +0000 UTC m=+151.517644755"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.552988 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-rtrxr"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.583411 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.583388171 podStartE2EDuration="2.583388171s" podCreationTimestamp="2025-09-29 21:27:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:27:53.583172884 +0000 UTC m=+151.560285555" watchObservedRunningTime="2025-09-29 21:27:53.583388171 +0000 UTC m=+151.560500842"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.658097 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.659142 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.679503 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5"
Sep 29 21:27:53 crc kubenswrapper[4911]: I0929 21:27:53.863993 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7nx76"]
Sep 29 21:27:53 crc kubenswrapper[4911]: W0929 21:27:53.887369 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podadc47074_abdd_41ee_b5de_8c483fc77226.slice/crio-e98bcbc74a4ecbf637608dd642a689384a4969a51cc87e64cdcde8008b428ce2 WatchSource:0}: Error finding container e98bcbc74a4ecbf637608dd642a689384a4969a51cc87e64cdcde8008b428ce2: Status 404 returned error can't find the container with id e98bcbc74a4ecbf637608dd642a689384a4969a51cc87e64cdcde8008b428ce2
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.096326 4911 patch_prober.go:28] interesting pod/downloads-7954f5f757-mrbmd container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.096419 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-mrbmd" podUID="7362e2ef-cab6-4891-bb59-a7969b6e72b6" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.096588 4911 patch_prober.go:28] interesting pod/downloads-7954f5f757-mrbmd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.096625 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-mrbmd" podUID="7362e2ef-cab6-4891-bb59-a7969b6e72b6" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.307170 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-gx2fb"
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.311144 4911 patch_prober.go:28] interesting pod/router-default-5444994796-gx2fb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 21:27:54 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld
Sep 29 21:27:54 crc kubenswrapper[4911]: [+]process-running ok
Sep 29 21:27:54 crc kubenswrapper[4911]: healthz check failed
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.311200 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx2fb" podUID="bead4097-1138-4381-9884-93bbf059b717" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.509770 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg"
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.547216 4911 generic.go:334] "Generic (PLEG): container finished" podID="587286a6-f2ab-423c-8569-9f9d2b103edf" containerID="97a1a67cbf135662c00a9afdd19c3f7122f1d40175348443fa7c217602240cae" exitCode=0
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.547309 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"587286a6-f2ab-423c-8569-9f9d2b103edf","Type":"ContainerDied","Data":"97a1a67cbf135662c00a9afdd19c3f7122f1d40175348443fa7c217602240cae"}
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.553182 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" event={"ID":"4357f10f-dad3-4233-9d03-1cad6319e4a9","Type":"ContainerStarted","Data":"779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb"}
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.571383 4911 generic.go:334] "Generic (PLEG): container finished" podID="5be9e2b9-d97c-40c1-9179-2703b878679b" containerID="2a5dffc331256b3351c63628c0a607f462efe380197a09b63a7440dac4760428" exitCode=0
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.571477 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8xxks" event={"ID":"5be9e2b9-d97c-40c1-9179-2703b878679b","Type":"ContainerDied","Data":"2a5dffc331256b3351c63628c0a607f462efe380197a09b63a7440dac4760428"}
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.577652 4911 generic.go:334] "Generic (PLEG): container finished" podID="adc47074-abdd-41ee-b5de-8c483fc77226" containerID="46030920e5eade1f836a7c1b21a8c0673a671388397003bec977855eef1c005f" exitCode=0
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.577732 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nx76" event={"ID":"adc47074-abdd-41ee-b5de-8c483fc77226","Type":"ContainerDied","Data":"46030920e5eade1f836a7c1b21a8c0673a671388397003bec977855eef1c005f"}
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.577764 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nx76" event={"ID":"adc47074-abdd-41ee-b5de-8c483fc77226","Type":"ContainerStarted","Data":"e98bcbc74a4ecbf637608dd642a689384a4969a51cc87e64cdcde8008b428ce2"}
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.591738 4911 generic.go:334] "Generic (PLEG): container finished" podID="fd54ef5c-5d59-4326-aba3-ba1a915313c9" containerID="40dafd89bfc7fd90dd5ce34462dce3535d4301ec7218f92f5f30bf27af94cbdb" exitCode=0
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.592979 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m9d9l" event={"ID":"fd54ef5c-5d59-4326-aba3-ba1a915313c9","Type":"ContainerDied","Data":"40dafd89bfc7fd90dd5ce34462dce3535d4301ec7218f92f5f30bf27af94cbdb"}
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.593072 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m9d9l" event={"ID":"fd54ef5c-5d59-4326-aba3-ba1a915313c9","Type":"ContainerStarted","Data":"ea96b6f6921b59594e676c818f6009053dbc857b760af8ddb16e8346a6c0b1a1"}
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.600130 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-rtrxr"
Sep 29 21:27:54 crc kubenswrapper[4911]: I0929 21:27:54.604748 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6sql5"
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.049136 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss"
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.104895 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdhfg\" (UniqueName: \"kubernetes.io/projected/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-kube-api-access-kdhfg\") pod \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\" (UID: \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\") "
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.104957 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-config-volume\") pod \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\" (UID: \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\") "
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.104997 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-secret-volume\") pod \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\" (UID: \"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf\") "
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.106903 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-config-volume" (OuterVolumeSpecName: "config-volume") pod "2ad88e09-2e13-4070-9c1f-75cb9dd12ebf" (UID: "2ad88e09-2e13-4070-9c1f-75cb9dd12ebf"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.128066 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2ad88e09-2e13-4070-9c1f-75cb9dd12ebf" (UID: "2ad88e09-2e13-4070-9c1f-75cb9dd12ebf"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.134859 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-kube-api-access-kdhfg" (OuterVolumeSpecName: "kube-api-access-kdhfg") pod "2ad88e09-2e13-4070-9c1f-75cb9dd12ebf" (UID: "2ad88e09-2e13-4070-9c1f-75cb9dd12ebf"). InnerVolumeSpecName "kube-api-access-kdhfg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.207049 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdhfg\" (UniqueName: \"kubernetes.io/projected/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-kube-api-access-kdhfg\") on node \"crc\" DevicePath \"\""
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.207466 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-config-volume\") on node \"crc\" DevicePath \"\""
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.207483 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.211385 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.211435 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.308337 4911 patch_prober.go:28] interesting pod/router-default-5444994796-gx2fb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 21:27:55 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld
Sep 29 21:27:55 crc kubenswrapper[4911]: [+]process-running ok
Sep 29 21:27:55 crc kubenswrapper[4911]: healthz check failed
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.308449 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx2fb" podUID="bead4097-1138-4381-9884-93bbf059b717" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.652624 4911 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.656948 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss" event={"ID":"2ad88e09-2e13-4070-9c1f-75cb9dd12ebf","Type":"ContainerDied","Data":"f08a8994a1230c8c4a574e0226b25cc826ec56db3dc5e5bc0ab10cdbca2c84b1"} Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.658030 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f08a8994a1230c8c4a574e0226b25cc826ec56db3dc5e5bc0ab10cdbca2c84b1" Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.891313 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 21:27:55 crc kubenswrapper[4911]: E0929 21:27:55.891581 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ad88e09-2e13-4070-9c1f-75cb9dd12ebf" containerName="collect-profiles" Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.891599 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ad88e09-2e13-4070-9c1f-75cb9dd12ebf" containerName="collect-profiles" Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.891700 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ad88e09-2e13-4070-9c1f-75cb9dd12ebf" containerName="collect-profiles" Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.892130 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.895871 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.896609 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 29 21:27:55 crc kubenswrapper[4911]: I0929 21:27:55.904896 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.028133 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f5bb17fe-bdd3-4550-86f9-6a75f6ae8801-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.028193 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5bb17fe-bdd3-4550-86f9-6a75f6ae8801-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.047374 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.135720 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f5bb17fe-bdd3-4550-86f9-6a75f6ae8801-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.135814 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5bb17fe-bdd3-4550-86f9-6a75f6ae8801-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.136320 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f5bb17fe-bdd3-4550-86f9-6a75f6ae8801-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.165292 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5bb17fe-bdd3-4550-86f9-6a75f6ae8801-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.224852 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.238578 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/587286a6-f2ab-423c-8569-9f9d2b103edf-kubelet-dir\") pod \"587286a6-f2ab-423c-8569-9f9d2b103edf\" (UID: \"587286a6-f2ab-423c-8569-9f9d2b103edf\") " Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.238976 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/587286a6-f2ab-423c-8569-9f9d2b103edf-kube-api-access\") pod \"587286a6-f2ab-423c-8569-9f9d2b103edf\" (UID: \"587286a6-f2ab-423c-8569-9f9d2b103edf\") " Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.241441 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/587286a6-f2ab-423c-8569-9f9d2b103edf-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "587286a6-f2ab-423c-8569-9f9d2b103edf" (UID: "587286a6-f2ab-423c-8569-9f9d2b103edf"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.244979 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/587286a6-f2ab-423c-8569-9f9d2b103edf-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "587286a6-f2ab-423c-8569-9f9d2b103edf" (UID: "587286a6-f2ab-423c-8569-9f9d2b103edf"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.309854 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.324615 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-gx2fb" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.341409 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/587286a6-f2ab-423c-8569-9f9d2b103edf-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.341459 4911 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/587286a6-f2ab-423c-8569-9f9d2b103edf-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.363765 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.787508 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"587286a6-f2ab-423c-8569-9f9d2b103edf","Type":"ContainerDied","Data":"45302e430d540085820844b0cf63f275520d1a6e894a8145e495de4e96ad163e"} Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.787587 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45302e430d540085820844b0cf63f275520d1a6e894a8145e495de4e96ad163e" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.787685 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 21:27:56 crc kubenswrapper[4911]: I0929 21:27:56.930295 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 21:27:57 crc kubenswrapper[4911]: I0929 21:27:57.804646 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801","Type":"ContainerStarted","Data":"7b7a49c9a5173f1467a3ddeeacae74e59e1179d029d7d9f24123bfe33fe011dd"} Sep 29 21:27:58 crc kubenswrapper[4911]: I0929 21:27:58.831296 4911 generic.go:334] "Generic (PLEG): container finished" podID="f5bb17fe-bdd3-4550-86f9-6a75f6ae8801" containerID="d97a438f82ff67f3b214187cab632701f4f7e7fd227e2784276fe87e010b03e5" exitCode=0 Sep 29 21:27:58 crc kubenswrapper[4911]: I0929 21:27:58.831386 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801","Type":"ContainerDied","Data":"d97a438f82ff67f3b214187cab632701f4f7e7fd227e2784276fe87e010b03e5"} Sep 29 21:27:59 crc kubenswrapper[4911]: I0929 21:27:59.432420 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-2pbvr" Sep 29 21:28:00 crc kubenswrapper[4911]: I0929 21:28:00.469709 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 21:28:00 crc kubenswrapper[4911]: I0929 21:28:00.642087 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f5bb17fe-bdd3-4550-86f9-6a75f6ae8801-kubelet-dir\") pod \"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801\" (UID: \"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801\") " Sep 29 21:28:00 crc kubenswrapper[4911]: I0929 21:28:00.642231 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5bb17fe-bdd3-4550-86f9-6a75f6ae8801-kube-api-access\") pod \"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801\" (UID: \"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801\") " Sep 29 21:28:00 crc kubenswrapper[4911]: I0929 21:28:00.643911 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f5bb17fe-bdd3-4550-86f9-6a75f6ae8801-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f5bb17fe-bdd3-4550-86f9-6a75f6ae8801" (UID: "f5bb17fe-bdd3-4550-86f9-6a75f6ae8801"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:28:00 crc kubenswrapper[4911]: I0929 21:28:00.652554 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5bb17fe-bdd3-4550-86f9-6a75f6ae8801-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f5bb17fe-bdd3-4550-86f9-6a75f6ae8801" (UID: "f5bb17fe-bdd3-4550-86f9-6a75f6ae8801"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:28:00 crc kubenswrapper[4911]: I0929 21:28:00.745000 4911 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f5bb17fe-bdd3-4550-86f9-6a75f6ae8801-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:00 crc kubenswrapper[4911]: I0929 21:28:00.745151 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5bb17fe-bdd3-4550-86f9-6a75f6ae8801-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:00 crc kubenswrapper[4911]: I0929 21:28:00.868119 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f5bb17fe-bdd3-4550-86f9-6a75f6ae8801","Type":"ContainerDied","Data":"7b7a49c9a5173f1467a3ddeeacae74e59e1179d029d7d9f24123bfe33fe011dd"} Sep 29 21:28:00 crc kubenswrapper[4911]: I0929 21:28:00.868169 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b7a49c9a5173f1467a3ddeeacae74e59e1179d029d7d9f24123bfe33fe011dd" Sep 29 21:28:00 crc kubenswrapper[4911]: I0929 21:28:00.868257 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 21:28:03 crc kubenswrapper[4911]: I0929 21:28:03.584518 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:28:03 crc kubenswrapper[4911]: I0929 21:28:03.588529 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:28:04 crc kubenswrapper[4911]: I0929 21:28:04.116884 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-mrbmd" Sep 29 21:28:05 crc kubenswrapper[4911]: I0929 21:28:05.319062 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:28:05 crc kubenswrapper[4911]: I0929 21:28:05.327654 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b53f9593-39bf-43e0-b1de-09192d0167cd-metrics-certs\") pod \"network-metrics-daemon-d5gdh\" (UID: \"b53f9593-39bf-43e0-b1de-09192d0167cd\") " pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:28:05 crc kubenswrapper[4911]: I0929 21:28:05.340463 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-d5gdh" Sep 29 21:28:12 crc kubenswrapper[4911]: I0929 21:28:12.344795 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:28:14 crc kubenswrapper[4911]: I0929 21:28:14.436272 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-d5gdh"] Sep 29 21:28:19 crc kubenswrapper[4911]: W0929 21:28:19.396073 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb53f9593_39bf_43e0_b1de_09192d0167cd.slice/crio-3930e5b9adb9266823888544a45e929a849c66123963f9431febc7af414a279f WatchSource:0}: Error finding container 3930e5b9adb9266823888544a45e929a849c66123963f9431febc7af414a279f: Status 404 returned error can't find the container with id 3930e5b9adb9266823888544a45e929a849c66123963f9431febc7af414a279f Sep 29 21:28:19 crc kubenswrapper[4911]: E0929 21:28:19.528029 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 29 21:28:19 crc kubenswrapper[4911]: E0929 21:28:19.528260 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6jn4k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-8xxks_openshift-marketplace(5be9e2b9-d97c-40c1-9179-2703b878679b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 21:28:19 crc kubenswrapper[4911]: E0929 21:28:19.530044 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-8xxks" podUID="5be9e2b9-d97c-40c1-9179-2703b878679b" Sep 29 21:28:20 crc kubenswrapper[4911]: I0929 21:28:20.011908 4911 generic.go:334] "Generic (PLEG): container finished" podID="596b137a-5101-4a31-85b8-050945c1de9b" containerID="2e6f07385cf63d0494c2ff76f5c4a17f89332df599749aca3f223efbed594922" exitCode=0 Sep 29 21:28:20 crc kubenswrapper[4911]: I0929 21:28:20.012654 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxt4d" event={"ID":"596b137a-5101-4a31-85b8-050945c1de9b","Type":"ContainerDied","Data":"2e6f07385cf63d0494c2ff76f5c4a17f89332df599749aca3f223efbed594922"} Sep 29 21:28:20 crc kubenswrapper[4911]: I0929 21:28:20.021267 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5kc9d" event={"ID":"f8f3e163-1816-4969-8594-c1a11760793a","Type":"ContainerStarted","Data":"a5a06a6961d89fa37e8b936f64033afe78278cbfefbec8efd9c28f46ca63cecf"} Sep 29 21:28:20 crc kubenswrapper[4911]: I0929 21:28:20.027948 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cz97x" event={"ID":"4f7f281b-93f4-42fe-9996-2a5a4860ce88","Type":"ContainerStarted","Data":"eece27ba58678cf4b3b9de80942a6917996aef9947f33623a53e16aec1a0f6c4"} Sep 29 21:28:20 crc kubenswrapper[4911]: I0929 21:28:20.031659 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" event={"ID":"b53f9593-39bf-43e0-b1de-09192d0167cd","Type":"ContainerStarted","Data":"54c077604d93a3af3ca5c060c400f7231ecaba79a018921041c0771675f87a91"} Sep 29 21:28:20 crc 
kubenswrapper[4911]: I0929 21:28:20.031735 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" event={"ID":"b53f9593-39bf-43e0-b1de-09192d0167cd","Type":"ContainerStarted","Data":"3930e5b9adb9266823888544a45e929a849c66123963f9431febc7af414a279f"} Sep 29 21:28:20 crc kubenswrapper[4911]: I0929 21:28:20.034640 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qksj7" event={"ID":"2f3bc61e-17ba-4f89-b582-1d4efd6d7146","Type":"ContainerStarted","Data":"52205d062c35d88243fd29424b6c69de67a0594053fd37da7be4f118a4031746"} Sep 29 21:28:20 crc kubenswrapper[4911]: I0929 21:28:20.045510 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nx76" event={"ID":"adc47074-abdd-41ee-b5de-8c483fc77226","Type":"ContainerStarted","Data":"55d3c6c2c750c675da2f64002d1871761b59888320dfafc7c3ee79fa000be824"} Sep 29 21:28:20 crc kubenswrapper[4911]: I0929 21:28:20.050764 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tlx2w" event={"ID":"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e","Type":"ContainerStarted","Data":"7bc4cd0ed31cf379af73df2fa287212f111aab6772b5954261a5eae4ea4ed2a8"} Sep 29 21:28:20 crc kubenswrapper[4911]: I0929 21:28:20.056038 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m9d9l" event={"ID":"fd54ef5c-5d59-4326-aba3-ba1a915313c9","Type":"ContainerStarted","Data":"7c20063bc4259927b13d11b2f0ff67b06a13aa260f23849b1f7e67da7e4e47fa"} Sep 29 21:28:20 crc kubenswrapper[4911]: E0929 21:28:20.061104 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-8xxks" podUID="5be9e2b9-d97c-40c1-9179-2703b878679b" Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.065127 4911 generic.go:334] "Generic (PLEG): container finished" podID="f8f3e163-1816-4969-8594-c1a11760793a" containerID="a5a06a6961d89fa37e8b936f64033afe78278cbfefbec8efd9c28f46ca63cecf" exitCode=0 Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.065274 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5kc9d" event={"ID":"f8f3e163-1816-4969-8594-c1a11760793a","Type":"ContainerDied","Data":"a5a06a6961d89fa37e8b936f64033afe78278cbfefbec8efd9c28f46ca63cecf"} Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.069659 4911 generic.go:334] "Generic (PLEG): container finished" podID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" containerID="eece27ba58678cf4b3b9de80942a6917996aef9947f33623a53e16aec1a0f6c4" exitCode=0 Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.069838 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cz97x" event={"ID":"4f7f281b-93f4-42fe-9996-2a5a4860ce88","Type":"ContainerDied","Data":"eece27ba58678cf4b3b9de80942a6917996aef9947f33623a53e16aec1a0f6c4"} Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.073647 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-d5gdh" event={"ID":"b53f9593-39bf-43e0-b1de-09192d0167cd","Type":"ContainerStarted","Data":"2fe822ef9929952ab522249f790ad7c20647a996cfad436d5113b32d2ea4ea6b"} Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.075691 4911 generic.go:334] "Generic 
(PLEG): container finished" podID="2f3bc61e-17ba-4f89-b582-1d4efd6d7146" containerID="52205d062c35d88243fd29424b6c69de67a0594053fd37da7be4f118a4031746" exitCode=0 Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.076437 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qksj7" event={"ID":"2f3bc61e-17ba-4f89-b582-1d4efd6d7146","Type":"ContainerDied","Data":"52205d062c35d88243fd29424b6c69de67a0594053fd37da7be4f118a4031746"} Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.081834 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nx76" event={"ID":"adc47074-abdd-41ee-b5de-8c483fc77226","Type":"ContainerDied","Data":"55d3c6c2c750c675da2f64002d1871761b59888320dfafc7c3ee79fa000be824"} Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.081871 4911 generic.go:334] "Generic (PLEG): container finished" podID="adc47074-abdd-41ee-b5de-8c483fc77226" containerID="55d3c6c2c750c675da2f64002d1871761b59888320dfafc7c3ee79fa000be824" exitCode=0 Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.094256 4911 generic.go:334] "Generic (PLEG): container finished" podID="e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" containerID="7bc4cd0ed31cf379af73df2fa287212f111aab6772b5954261a5eae4ea4ed2a8" exitCode=0 Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.094529 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tlx2w" event={"ID":"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e","Type":"ContainerDied","Data":"7bc4cd0ed31cf379af73df2fa287212f111aab6772b5954261a5eae4ea4ed2a8"} Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.096482 4911 generic.go:334] "Generic (PLEG): container finished" podID="fd54ef5c-5d59-4326-aba3-ba1a915313c9" containerID="7c20063bc4259927b13d11b2f0ff67b06a13aa260f23849b1f7e67da7e4e47fa" exitCode=0 Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.096531 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m9d9l" event={"ID":"fd54ef5c-5d59-4326-aba3-ba1a915313c9","Type":"ContainerDied","Data":"7c20063bc4259927b13d11b2f0ff67b06a13aa260f23849b1f7e67da7e4e47fa"} Sep 29 21:28:21 crc kubenswrapper[4911]: I0929 21:28:21.110853 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-d5gdh" podStartSLOduration=158.110833699 podStartE2EDuration="2m38.110833699s" podCreationTimestamp="2025-09-29 21:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:28:21.1088617 +0000 UTC m=+179.085974401" watchObservedRunningTime="2025-09-29 21:28:21.110833699 +0000 UTC m=+179.087946370" Sep 29 21:28:22 crc kubenswrapper[4911]: I0929 21:28:22.105932 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxt4d" event={"ID":"596b137a-5101-4a31-85b8-050945c1de9b","Type":"ContainerStarted","Data":"1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2"} Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.121716 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tlx2w" event={"ID":"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e","Type":"ContainerStarted","Data":"5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5"} Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.125309 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-m9d9l" event={"ID":"fd54ef5c-5d59-4326-aba3-ba1a915313c9","Type":"ContainerStarted","Data":"93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72"} Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.129468 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5kc9d" event={"ID":"f8f3e163-1816-4969-8594-c1a11760793a","Type":"ContainerStarted","Data":"e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e"} Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.131911 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cz97x" event={"ID":"4f7f281b-93f4-42fe-9996-2a5a4860ce88","Type":"ContainerStarted","Data":"120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d"} Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.134155 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qksj7" event={"ID":"2f3bc61e-17ba-4f89-b582-1d4efd6d7146","Type":"ContainerStarted","Data":"e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155"} Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.137983 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nx76" event={"ID":"adc47074-abdd-41ee-b5de-8c483fc77226","Type":"ContainerStarted","Data":"2f5a95942f3eb26c2a794b34447ffda3cac5c8b1d713286feb4d1e2224270cda"} Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.151281 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hxt4d" podStartSLOduration=5.225324552 podStartE2EDuration="33.15125785s" podCreationTimestamp="2025-09-29 21:27:51 +0000 UTC" firstStartedPulling="2025-09-29 21:27:53.386887789 +0000 UTC m=+151.364000460" lastFinishedPulling="2025-09-29 21:28:21.312821087 +0000 UTC m=+179.289933758" observedRunningTime="2025-09-29 21:28:22.133107972 +0000 UTC m=+180.110220673" watchObservedRunningTime="2025-09-29 21:28:24.15125785 +0000 UTC m=+182.128370521" Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.152922 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tlx2w" podStartSLOduration=4.340481746 podStartE2EDuration="35.15291503s" podCreationTimestamp="2025-09-29 21:27:49 +0000 UTC" firstStartedPulling="2025-09-29 21:27:52.351706046 +0000 UTC m=+150.328818717" lastFinishedPulling="2025-09-29 21:28:23.16413933 +0000 UTC m=+181.141252001" observedRunningTime="2025-09-29 21:28:24.146729093 +0000 UTC m=+182.123841764" watchObservedRunningTime="2025-09-29 21:28:24.15291503 +0000 UTC m=+182.130027701" Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.170112 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5kc9d" podStartSLOduration=4.333581077 podStartE2EDuration="34.17009223s" podCreationTimestamp="2025-09-29 21:27:50 +0000 UTC" firstStartedPulling="2025-09-29 21:27:53.414371 +0000 UTC m=+151.391483671" lastFinishedPulling="2025-09-29 21:28:23.250882153 +0000 UTC m=+181.227994824" observedRunningTime="2025-09-29 21:28:24.168327876 +0000 UTC m=+182.145440547" watchObservedRunningTime="2025-09-29 21:28:24.17009223 +0000 UTC m=+182.147204911" Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.193691 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-qksj7" podStartSLOduration=5.33704996 podStartE2EDuration="35.193671462s" podCreationTimestamp="2025-09-29 21:27:49 +0000 UTC" firstStartedPulling="2025-09-29 21:27:53.472340212 +0000 UTC m=+151.449452883" lastFinishedPulling="2025-09-29 21:28:23.328961714 +0000 UTC m=+181.306074385" observedRunningTime="2025-09-29 21:28:24.19062642 +0000 UTC m=+182.167739091" watchObservedRunningTime="2025-09-29 21:28:24.193671462 +0000 UTC m=+182.170784133" Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.254425 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m9d9l" podStartSLOduration=3.419081401 podStartE2EDuration="32.254401639s" podCreationTimestamp="2025-09-29 21:27:52 +0000 UTC" firstStartedPulling="2025-09-29 21:27:54.599335563 +0000 UTC m=+152.576448234" lastFinishedPulling="2025-09-29 21:28:23.434655781 +0000 UTC m=+181.411768472" observedRunningTime="2025-09-29 21:28:24.233140526 +0000 UTC m=+182.210253217" watchObservedRunningTime="2025-09-29 21:28:24.254401639 +0000 UTC m=+182.231514320" Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.293196 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7nx76" podStartSLOduration=2.335850534 podStartE2EDuration="31.293168921s" podCreationTimestamp="2025-09-29 21:27:53 +0000 UTC" firstStartedPulling="2025-09-29 21:27:54.584408001 +0000 UTC m=+152.561520672" lastFinishedPulling="2025-09-29 21:28:23.541726388 +0000 UTC m=+181.518839059" observedRunningTime="2025-09-29 21:28:24.288604184 +0000 UTC m=+182.265716865" watchObservedRunningTime="2025-09-29 21:28:24.293168921 +0000 UTC m=+182.270281592" Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.296515 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cz97x" podStartSLOduration=4.322234474 podStartE2EDuration="35.296503792s" podCreationTimestamp="2025-09-29 21:27:49 +0000 UTC" firstStartedPulling="2025-09-29 21:27:52.276016547 +0000 UTC m=+150.253129218" lastFinishedPulling="2025-09-29 21:28:23.250285855 +0000 UTC m=+181.227398536" observedRunningTime="2025-09-29 21:28:24.255945966 +0000 UTC m=+182.233058637" watchObservedRunningTime="2025-09-29 21:28:24.296503792 +0000 UTC m=+182.273616463" Sep 29 21:28:24 crc kubenswrapper[4911]: I0929 21:28:24.346159 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2gvvc" Sep 29 21:28:25 crc kubenswrapper[4911]: I0929 21:28:25.210980 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:28:25 crc kubenswrapper[4911]: I0929 21:28:25.211072 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:28:29 crc kubenswrapper[4911]: I0929 21:28:29.849277 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cz97x" Sep 29 
21:28:29 crc kubenswrapper[4911]: I0929 21:28:29.850043 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.095089 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.095177 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.239767 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.280029 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.280400 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.314727 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.314815 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.364353 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.365668 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.387545 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.735008 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.735090 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:28:30 crc kubenswrapper[4911]: I0929 21:28:30.786098 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:28:31 crc kubenswrapper[4911]: I0929 21:28:31.245400 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:28:31 crc kubenswrapper[4911]: I0929 21:28:31.262675 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:28:31 crc kubenswrapper[4911]: I0929 21:28:31.862144 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hxt4d" Sep 29 21:28:31 crc kubenswrapper[4911]: I0929 21:28:31.862499 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hxt4d" Sep 29 21:28:31 crc kubenswrapper[4911]: I0929 21:28:31.902625 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/redhat-marketplace-hxt4d" Sep 29 21:28:32 crc kubenswrapper[4911]: I0929 21:28:32.254009 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hxt4d" Sep 29 21:28:32 crc kubenswrapper[4911]: I0929 21:28:32.518396 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5kc9d"] Sep 29 21:28:32 crc kubenswrapper[4911]: I0929 21:28:32.723732 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tlx2w"] Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.109170 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m9d9l" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.110516 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m9d9l" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.173309 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m9d9l" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.218032 4911 generic.go:334] "Generic (PLEG): container finished" podID="5be9e2b9-d97c-40c1-9179-2703b878679b" containerID="4b3b01968aa31cf8aea4e56a86d343184c891ab1640c1cc6b9f10e84a826245b" exitCode=0 Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.218228 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8xxks" event={"ID":"5be9e2b9-d97c-40c1-9179-2703b878679b","Type":"ContainerDied","Data":"4b3b01968aa31cf8aea4e56a86d343184c891ab1640c1cc6b9f10e84a826245b"} Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.219316 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tlx2w" podUID="e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" containerName="registry-server" containerID="cri-o://5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5" gracePeriod=2 Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.219902 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5kc9d" podUID="f8f3e163-1816-4969-8594-c1a11760793a" containerName="registry-server" containerID="cri-o://e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e" gracePeriod=2 Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.300068 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m9d9l" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.526719 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7nx76" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.530080 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7nx76" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.599432 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7nx76" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.652299 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.685212 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8f3e163-1816-4969-8594-c1a11760793a-utilities\") pod \"f8f3e163-1816-4969-8594-c1a11760793a\" (UID: \"f8f3e163-1816-4969-8594-c1a11760793a\") " Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.685325 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8f3e163-1816-4969-8594-c1a11760793a-catalog-content\") pod \"f8f3e163-1816-4969-8594-c1a11760793a\" (UID: \"f8f3e163-1816-4969-8594-c1a11760793a\") " Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.685430 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44txz\" (UniqueName: \"kubernetes.io/projected/f8f3e163-1816-4969-8594-c1a11760793a-kube-api-access-44txz\") pod \"f8f3e163-1816-4969-8594-c1a11760793a\" (UID: \"f8f3e163-1816-4969-8594-c1a11760793a\") " Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.688870 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8f3e163-1816-4969-8594-c1a11760793a-utilities" (OuterVolumeSpecName: "utilities") pod "f8f3e163-1816-4969-8594-c1a11760793a" (UID: "f8f3e163-1816-4969-8594-c1a11760793a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.696003 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8f3e163-1816-4969-8594-c1a11760793a-kube-api-access-44txz" (OuterVolumeSpecName: "kube-api-access-44txz") pod "f8f3e163-1816-4969-8594-c1a11760793a" (UID: "f8f3e163-1816-4969-8594-c1a11760793a"). InnerVolumeSpecName "kube-api-access-44txz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.699623 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.744583 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8f3e163-1816-4969-8594-c1a11760793a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f8f3e163-1816-4969-8594-c1a11760793a" (UID: "f8f3e163-1816-4969-8594-c1a11760793a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.787406 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-catalog-content\") pod \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\" (UID: \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\") " Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.788526 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-utilities\") pod \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\" (UID: \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\") " Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.788660 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7chz6\" (UniqueName: \"kubernetes.io/projected/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-kube-api-access-7chz6\") pod \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\" (UID: \"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e\") " Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.789224 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8f3e163-1816-4969-8594-c1a11760793a-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.789427 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8f3e163-1816-4969-8594-c1a11760793a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.789523 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44txz\" (UniqueName: \"kubernetes.io/projected/f8f3e163-1816-4969-8594-c1a11760793a-kube-api-access-44txz\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.789737 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-utilities" (OuterVolumeSpecName: "utilities") pod "e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" (UID: "e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.793199 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-kube-api-access-7chz6" (OuterVolumeSpecName: "kube-api-access-7chz6") pod "e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" (UID: "e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e"). InnerVolumeSpecName "kube-api-access-7chz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.860881 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" (UID: "e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.891077 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.891115 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:33 crc kubenswrapper[4911]: I0929 21:28:33.891127 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7chz6\" (UniqueName: \"kubernetes.io/projected/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e-kube-api-access-7chz6\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.230346 4911 generic.go:334] "Generic (PLEG): container finished" podID="e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" containerID="5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5" exitCode=0 Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.231000 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tlx2w" event={"ID":"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e","Type":"ContainerDied","Data":"5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5"} Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.231287 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tlx2w" event={"ID":"e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e","Type":"ContainerDied","Data":"4f00b5b3656cf8886c0fca8b24c259a030805d214a2e8428c3967bc06fdb067d"} Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.231504 4911 scope.go:117] "RemoveContainer" containerID="5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5" Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.232026 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tlx2w" Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.240942 4911 generic.go:334] "Generic (PLEG): container finished" podID="f8f3e163-1816-4969-8594-c1a11760793a" containerID="e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e" exitCode=0 Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.241054 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5kc9d" event={"ID":"f8f3e163-1816-4969-8594-c1a11760793a","Type":"ContainerDied","Data":"e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e"} Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.241104 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5kc9d" event={"ID":"f8f3e163-1816-4969-8594-c1a11760793a","Type":"ContainerDied","Data":"62d7509a6ed605a0c4c42ce020ac38df29961a1c7285d4a193ced5fd37de8706"} Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.241245 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5kc9d" Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.247466 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8xxks" event={"ID":"5be9e2b9-d97c-40c1-9179-2703b878679b","Type":"ContainerStarted","Data":"16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4"} Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.275894 4911 scope.go:117] "RemoveContainer" containerID="7bc4cd0ed31cf379af73df2fa287212f111aab6772b5954261a5eae4ea4ed2a8" Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.292026 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8xxks" podStartSLOduration=3.003288587 podStartE2EDuration="43.29199137s" podCreationTimestamp="2025-09-29 21:27:51 +0000 UTC" firstStartedPulling="2025-09-29 21:27:53.467339841 +0000 UTC m=+151.444452512" lastFinishedPulling="2025-09-29 21:28:33.756042624 +0000 UTC m=+191.733155295" observedRunningTime="2025-09-29 21:28:34.27641849 +0000 UTC m=+192.253531171" watchObservedRunningTime="2025-09-29 21:28:34.29199137 +0000 UTC m=+192.269104081" Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.314539 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tlx2w"] Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.319970 4911 scope.go:117] "RemoveContainer" containerID="b8417bd352fe825d87d1d9a924268f11b9f5e1dc1fc29150226e433f6646ffe4" Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.319987 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7nx76" Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.323998 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tlx2w"] Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.331055 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5kc9d"] Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.339553 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5kc9d"] Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.347096 4911 scope.go:117] "RemoveContainer" containerID="5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5" Sep 29 21:28:34 crc kubenswrapper[4911]: E0929 21:28:34.347829 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5\": container with ID starting with 5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5 not found: ID does not exist" containerID="5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5" Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.347873 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5"} err="failed to get container status \"5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5\": rpc error: code = NotFound desc = could not find container \"5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5\": container with ID starting with 5b538b3c0a19bd319ab64ffe010ca0189dfe513ef9379fa3d78aa0132b30d5a5 not found: ID does not exist" Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 
Sep 29 21:28:34 crc kubenswrapper[4911]: E0929 21:28:34.348513 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bc4cd0ed31cf379af73df2fa287212f111aab6772b5954261a5eae4ea4ed2a8\": container with ID starting with 7bc4cd0ed31cf379af73df2fa287212f111aab6772b5954261a5eae4ea4ed2a8 not found: ID does not exist" containerID="7bc4cd0ed31cf379af73df2fa287212f111aab6772b5954261a5eae4ea4ed2a8"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.348576 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bc4cd0ed31cf379af73df2fa287212f111aab6772b5954261a5eae4ea4ed2a8"} err="failed to get container status \"7bc4cd0ed31cf379af73df2fa287212f111aab6772b5954261a5eae4ea4ed2a8\": rpc error: code = NotFound desc = could not find container \"7bc4cd0ed31cf379af73df2fa287212f111aab6772b5954261a5eae4ea4ed2a8\": container with ID starting with 7bc4cd0ed31cf379af73df2fa287212f111aab6772b5954261a5eae4ea4ed2a8 not found: ID does not exist"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.348621 4911 scope.go:117] "RemoveContainer" containerID="b8417bd352fe825d87d1d9a924268f11b9f5e1dc1fc29150226e433f6646ffe4"
Sep 29 21:28:34 crc kubenswrapper[4911]: E0929 21:28:34.349114 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8417bd352fe825d87d1d9a924268f11b9f5e1dc1fc29150226e433f6646ffe4\": container with ID starting with b8417bd352fe825d87d1d9a924268f11b9f5e1dc1fc29150226e433f6646ffe4 not found: ID does not exist" containerID="b8417bd352fe825d87d1d9a924268f11b9f5e1dc1fc29150226e433f6646ffe4"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.349177 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8417bd352fe825d87d1d9a924268f11b9f5e1dc1fc29150226e433f6646ffe4"} err="failed to get container status \"b8417bd352fe825d87d1d9a924268f11b9f5e1dc1fc29150226e433f6646ffe4\": rpc error: code = NotFound desc = could not find container \"b8417bd352fe825d87d1d9a924268f11b9f5e1dc1fc29150226e433f6646ffe4\": container with ID starting with b8417bd352fe825d87d1d9a924268f11b9f5e1dc1fc29150226e433f6646ffe4 not found: ID does not exist"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.349227 4911 scope.go:117] "RemoveContainer" containerID="e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.391651 4911 scope.go:117] "RemoveContainer" containerID="a5a06a6961d89fa37e8b936f64033afe78278cbfefbec8efd9c28f46ca63cecf"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.420737 4911 scope.go:117] "RemoveContainer" containerID="f2e77eef62209aeec14c57a8c71ea55031d2e4e4884df9c2339361a508728c84"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.440841 4911 scope.go:117] "RemoveContainer" containerID="e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e"
Sep 29 21:28:34 crc kubenswrapper[4911]: E0929 21:28:34.441417 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e\": container with ID starting with e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e not found: ID does not exist" containerID="e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.441470 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e"} err="failed to get container status \"e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e\": rpc error: code = NotFound desc = could not find container \"e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e\": container with ID starting with e6cf914451d9b8324bf8e8c872ad70aa56b8ba7877bf4f6624b24f36fb2d8a7e not found: ID does not exist"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.441579 4911 scope.go:117] "RemoveContainer" containerID="a5a06a6961d89fa37e8b936f64033afe78278cbfefbec8efd9c28f46ca63cecf"
Sep 29 21:28:34 crc kubenswrapper[4911]: E0929 21:28:34.441978 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5a06a6961d89fa37e8b936f64033afe78278cbfefbec8efd9c28f46ca63cecf\": container with ID starting with a5a06a6961d89fa37e8b936f64033afe78278cbfefbec8efd9c28f46ca63cecf not found: ID does not exist" containerID="a5a06a6961d89fa37e8b936f64033afe78278cbfefbec8efd9c28f46ca63cecf"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.442014 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5a06a6961d89fa37e8b936f64033afe78278cbfefbec8efd9c28f46ca63cecf"} err="failed to get container status \"a5a06a6961d89fa37e8b936f64033afe78278cbfefbec8efd9c28f46ca63cecf\": rpc error: code = NotFound desc = could not find container \"a5a06a6961d89fa37e8b936f64033afe78278cbfefbec8efd9c28f46ca63cecf\": container with ID starting with a5a06a6961d89fa37e8b936f64033afe78278cbfefbec8efd9c28f46ca63cecf not found: ID does not exist"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.442042 4911 scope.go:117] "RemoveContainer" containerID="f2e77eef62209aeec14c57a8c71ea55031d2e4e4884df9c2339361a508728c84"
Sep 29 21:28:34 crc kubenswrapper[4911]: E0929 21:28:34.442272 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2e77eef62209aeec14c57a8c71ea55031d2e4e4884df9c2339361a508728c84\": container with ID starting with f2e77eef62209aeec14c57a8c71ea55031d2e4e4884df9c2339361a508728c84 not found: ID does not exist" containerID="f2e77eef62209aeec14c57a8c71ea55031d2e4e4884df9c2339361a508728c84"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.442311 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2e77eef62209aeec14c57a8c71ea55031d2e4e4884df9c2339361a508728c84"} err="failed to get container status \"f2e77eef62209aeec14c57a8c71ea55031d2e4e4884df9c2339361a508728c84\": rpc error: code = NotFound desc = could not find container \"f2e77eef62209aeec14c57a8c71ea55031d2e4e4884df9c2339361a508728c84\": container with ID starting with f2e77eef62209aeec14c57a8c71ea55031d2e4e4884df9c2339361a508728c84 not found: ID does not exist"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.709406 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" path="/var/lib/kubelet/pods/e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e/volumes"
Sep 29 21:28:34 crc kubenswrapper[4911]: I0929 21:28:34.710614 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8f3e163-1816-4969-8594-c1a11760793a" path="/var/lib/kubelet/pods/f8f3e163-1816-4969-8594-c1a11760793a/volumes"
podUID="f8f3e163-1816-4969-8594-c1a11760793a" path="/var/lib/kubelet/pods/f8f3e163-1816-4969-8594-c1a11760793a/volumes" Sep 29 21:28:37 crc kubenswrapper[4911]: I0929 21:28:37.117257 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7nx76"] Sep 29 21:28:37 crc kubenswrapper[4911]: I0929 21:28:37.268627 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7nx76" podUID="adc47074-abdd-41ee-b5de-8c483fc77226" containerName="registry-server" containerID="cri-o://2f5a95942f3eb26c2a794b34447ffda3cac5c8b1d713286feb4d1e2224270cda" gracePeriod=2 Sep 29 21:28:37 crc kubenswrapper[4911]: I0929 21:28:37.623502 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7nx76" Sep 29 21:28:37 crc kubenswrapper[4911]: I0929 21:28:37.647960 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc47074-abdd-41ee-b5de-8c483fc77226-catalog-content\") pod \"adc47074-abdd-41ee-b5de-8c483fc77226\" (UID: \"adc47074-abdd-41ee-b5de-8c483fc77226\") " Sep 29 21:28:37 crc kubenswrapper[4911]: I0929 21:28:37.648111 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr6v5\" (UniqueName: \"kubernetes.io/projected/adc47074-abdd-41ee-b5de-8c483fc77226-kube-api-access-rr6v5\") pod \"adc47074-abdd-41ee-b5de-8c483fc77226\" (UID: \"adc47074-abdd-41ee-b5de-8c483fc77226\") " Sep 29 21:28:37 crc kubenswrapper[4911]: I0929 21:28:37.648176 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc47074-abdd-41ee-b5de-8c483fc77226-utilities\") pod \"adc47074-abdd-41ee-b5de-8c483fc77226\" (UID: \"adc47074-abdd-41ee-b5de-8c483fc77226\") " Sep 29 21:28:37 crc kubenswrapper[4911]: I0929 21:28:37.651518 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adc47074-abdd-41ee-b5de-8c483fc77226-utilities" (OuterVolumeSpecName: "utilities") pod "adc47074-abdd-41ee-b5de-8c483fc77226" (UID: "adc47074-abdd-41ee-b5de-8c483fc77226"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:28:37 crc kubenswrapper[4911]: I0929 21:28:37.654946 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adc47074-abdd-41ee-b5de-8c483fc77226-kube-api-access-rr6v5" (OuterVolumeSpecName: "kube-api-access-rr6v5") pod "adc47074-abdd-41ee-b5de-8c483fc77226" (UID: "adc47074-abdd-41ee-b5de-8c483fc77226"). InnerVolumeSpecName "kube-api-access-rr6v5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:28:37 crc kubenswrapper[4911]: I0929 21:28:37.739199 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adc47074-abdd-41ee-b5de-8c483fc77226-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "adc47074-abdd-41ee-b5de-8c483fc77226" (UID: "adc47074-abdd-41ee-b5de-8c483fc77226"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:28:37 crc kubenswrapper[4911]: I0929 21:28:37.749739 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adc47074-abdd-41ee-b5de-8c483fc77226-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:37 crc kubenswrapper[4911]: I0929 21:28:37.749769 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr6v5\" (UniqueName: \"kubernetes.io/projected/adc47074-abdd-41ee-b5de-8c483fc77226-kube-api-access-rr6v5\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:37 crc kubenswrapper[4911]: I0929 21:28:37.749782 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adc47074-abdd-41ee-b5de-8c483fc77226-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.277610 4911 generic.go:334] "Generic (PLEG): container finished" podID="adc47074-abdd-41ee-b5de-8c483fc77226" containerID="2f5a95942f3eb26c2a794b34447ffda3cac5c8b1d713286feb4d1e2224270cda" exitCode=0 Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.277672 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nx76" event={"ID":"adc47074-abdd-41ee-b5de-8c483fc77226","Type":"ContainerDied","Data":"2f5a95942f3eb26c2a794b34447ffda3cac5c8b1d713286feb4d1e2224270cda"} Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.277692 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7nx76" Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.277715 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nx76" event={"ID":"adc47074-abdd-41ee-b5de-8c483fc77226","Type":"ContainerDied","Data":"e98bcbc74a4ecbf637608dd642a689384a4969a51cc87e64cdcde8008b428ce2"} Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.277741 4911 scope.go:117] "RemoveContainer" containerID="2f5a95942f3eb26c2a794b34447ffda3cac5c8b1d713286feb4d1e2224270cda" Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.300345 4911 scope.go:117] "RemoveContainer" containerID="55d3c6c2c750c675da2f64002d1871761b59888320dfafc7c3ee79fa000be824" Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.311986 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7nx76"] Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.316245 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7nx76"] Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.328934 4911 scope.go:117] "RemoveContainer" containerID="46030920e5eade1f836a7c1b21a8c0673a671388397003bec977855eef1c005f" Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.345962 4911 scope.go:117] "RemoveContainer" containerID="2f5a95942f3eb26c2a794b34447ffda3cac5c8b1d713286feb4d1e2224270cda" Sep 29 21:28:38 crc kubenswrapper[4911]: E0929 21:28:38.347206 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f5a95942f3eb26c2a794b34447ffda3cac5c8b1d713286feb4d1e2224270cda\": container with ID starting with 2f5a95942f3eb26c2a794b34447ffda3cac5c8b1d713286feb4d1e2224270cda not found: ID does not exist" containerID="2f5a95942f3eb26c2a794b34447ffda3cac5c8b1d713286feb4d1e2224270cda" Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.347325 4911 
Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.347432 4911 scope.go:117] "RemoveContainer" containerID="55d3c6c2c750c675da2f64002d1871761b59888320dfafc7c3ee79fa000be824"
Sep 29 21:28:38 crc kubenswrapper[4911]: E0929 21:28:38.348057 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55d3c6c2c750c675da2f64002d1871761b59888320dfafc7c3ee79fa000be824\": container with ID starting with 55d3c6c2c750c675da2f64002d1871761b59888320dfafc7c3ee79fa000be824 not found: ID does not exist" containerID="55d3c6c2c750c675da2f64002d1871761b59888320dfafc7c3ee79fa000be824"
Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.348104 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55d3c6c2c750c675da2f64002d1871761b59888320dfafc7c3ee79fa000be824"} err="failed to get container status \"55d3c6c2c750c675da2f64002d1871761b59888320dfafc7c3ee79fa000be824\": rpc error: code = NotFound desc = could not find container \"55d3c6c2c750c675da2f64002d1871761b59888320dfafc7c3ee79fa000be824\": container with ID starting with 55d3c6c2c750c675da2f64002d1871761b59888320dfafc7c3ee79fa000be824 not found: ID does not exist"
Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.348145 4911 scope.go:117] "RemoveContainer" containerID="46030920e5eade1f836a7c1b21a8c0673a671388397003bec977855eef1c005f"
Sep 29 21:28:38 crc kubenswrapper[4911]: E0929 21:28:38.348423 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46030920e5eade1f836a7c1b21a8c0673a671388397003bec977855eef1c005f\": container with ID starting with 46030920e5eade1f836a7c1b21a8c0673a671388397003bec977855eef1c005f not found: ID does not exist" containerID="46030920e5eade1f836a7c1b21a8c0673a671388397003bec977855eef1c005f"
Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.348451 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46030920e5eade1f836a7c1b21a8c0673a671388397003bec977855eef1c005f"} err="failed to get container status \"46030920e5eade1f836a7c1b21a8c0673a671388397003bec977855eef1c005f\": rpc error: code = NotFound desc = could not find container \"46030920e5eade1f836a7c1b21a8c0673a671388397003bec977855eef1c005f\": container with ID starting with 46030920e5eade1f836a7c1b21a8c0673a671388397003bec977855eef1c005f not found: ID does not exist"
Sep 29 21:28:38 crc kubenswrapper[4911]: I0929 21:28:38.716281 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="adc47074-abdd-41ee-b5de-8c483fc77226" path="/var/lib/kubelet/pods/adc47074-abdd-41ee-b5de-8c483fc77226/volumes"
Sep 29 21:28:42 crc kubenswrapper[4911]: I0929 21:28:42.329167 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:28:42 crc kubenswrapper[4911]: I0929 21:28:42.329693 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:28:42 crc kubenswrapper[4911]: I0929 21:28:42.397271 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:28:43 crc kubenswrapper[4911]: I0929 21:28:43.374126 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:28:45 crc kubenswrapper[4911]: I0929 21:28:45.518756 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8xxks"]
Sep 29 21:28:45 crc kubenswrapper[4911]: I0929 21:28:45.520325 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8xxks" podUID="5be9e2b9-d97c-40c1-9179-2703b878679b" containerName="registry-server" containerID="cri-o://16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4" gracePeriod=2
Sep 29 21:28:45 crc kubenswrapper[4911]: I0929 21:28:45.921244 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8xxks"
Sep 29 21:28:45 crc kubenswrapper[4911]: I0929 21:28:45.969825 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5be9e2b9-d97c-40c1-9179-2703b878679b-catalog-content\") pod \"5be9e2b9-d97c-40c1-9179-2703b878679b\" (UID: \"5be9e2b9-d97c-40c1-9179-2703b878679b\") "
Sep 29 21:28:45 crc kubenswrapper[4911]: I0929 21:28:45.970006 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5be9e2b9-d97c-40c1-9179-2703b878679b-utilities\") pod \"5be9e2b9-d97c-40c1-9179-2703b878679b\" (UID: \"5be9e2b9-d97c-40c1-9179-2703b878679b\") "
Sep 29 21:28:45 crc kubenswrapper[4911]: I0929 21:28:45.970041 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jn4k\" (UniqueName: \"kubernetes.io/projected/5be9e2b9-d97c-40c1-9179-2703b878679b-kube-api-access-6jn4k\") pod \"5be9e2b9-d97c-40c1-9179-2703b878679b\" (UID: \"5be9e2b9-d97c-40c1-9179-2703b878679b\") "
Sep 29 21:28:45 crc kubenswrapper[4911]: I0929 21:28:45.971815 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5be9e2b9-d97c-40c1-9179-2703b878679b-utilities" (OuterVolumeSpecName: "utilities") pod "5be9e2b9-d97c-40c1-9179-2703b878679b" (UID: "5be9e2b9-d97c-40c1-9179-2703b878679b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:28:45 crc kubenswrapper[4911]: I0929 21:28:45.987028 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5be9e2b9-d97c-40c1-9179-2703b878679b-kube-api-access-6jn4k" (OuterVolumeSpecName: "kube-api-access-6jn4k") pod "5be9e2b9-d97c-40c1-9179-2703b878679b" (UID: "5be9e2b9-d97c-40c1-9179-2703b878679b"). InnerVolumeSpecName "kube-api-access-6jn4k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.000151 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5be9e2b9-d97c-40c1-9179-2703b878679b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5be9e2b9-d97c-40c1-9179-2703b878679b" (UID: "5be9e2b9-d97c-40c1-9179-2703b878679b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.071085 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5be9e2b9-d97c-40c1-9179-2703b878679b-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.071123 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jn4k\" (UniqueName: \"kubernetes.io/projected/5be9e2b9-d97c-40c1-9179-2703b878679b-kube-api-access-6jn4k\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.071135 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5be9e2b9-d97c-40c1-9179-2703b878679b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.349352 4911 generic.go:334] "Generic (PLEG): container finished" podID="5be9e2b9-d97c-40c1-9179-2703b878679b" containerID="16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4" exitCode=0 Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.349422 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8xxks" event={"ID":"5be9e2b9-d97c-40c1-9179-2703b878679b","Type":"ContainerDied","Data":"16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4"} Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.349461 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8xxks" event={"ID":"5be9e2b9-d97c-40c1-9179-2703b878679b","Type":"ContainerDied","Data":"809523b63ce5f678b446777ccb64d99daa9545602c700a3fd356fb4983de3917"} Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.349485 4911 scope.go:117] "RemoveContainer" containerID="16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.349480 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8xxks" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.371745 4911 scope.go:117] "RemoveContainer" containerID="4b3b01968aa31cf8aea4e56a86d343184c891ab1640c1cc6b9f10e84a826245b" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.385209 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8xxks"] Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.401124 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8xxks"] Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.401662 4911 scope.go:117] "RemoveContainer" containerID="2a5dffc331256b3351c63628c0a607f462efe380197a09b63a7440dac4760428" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.420411 4911 scope.go:117] "RemoveContainer" containerID="16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4" Sep 29 21:28:46 crc kubenswrapper[4911]: E0929 21:28:46.421113 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4\": container with ID starting with 16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4 not found: ID does not exist" containerID="16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.421254 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4"} err="failed to get container status \"16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4\": rpc error: code = NotFound desc = could not find container \"16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4\": container with ID starting with 16b53d7640f098bcdb13c3f5df149347f74a3136832c9f2c11b1268c71794ef4 not found: ID does not exist" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.421441 4911 scope.go:117] "RemoveContainer" containerID="4b3b01968aa31cf8aea4e56a86d343184c891ab1640c1cc6b9f10e84a826245b" Sep 29 21:28:46 crc kubenswrapper[4911]: E0929 21:28:46.422161 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b3b01968aa31cf8aea4e56a86d343184c891ab1640c1cc6b9f10e84a826245b\": container with ID starting with 4b3b01968aa31cf8aea4e56a86d343184c891ab1640c1cc6b9f10e84a826245b not found: ID does not exist" containerID="4b3b01968aa31cf8aea4e56a86d343184c891ab1640c1cc6b9f10e84a826245b" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.422258 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b3b01968aa31cf8aea4e56a86d343184c891ab1640c1cc6b9f10e84a826245b"} err="failed to get container status \"4b3b01968aa31cf8aea4e56a86d343184c891ab1640c1cc6b9f10e84a826245b\": rpc error: code = NotFound desc = could not find container \"4b3b01968aa31cf8aea4e56a86d343184c891ab1640c1cc6b9f10e84a826245b\": container with ID starting with 4b3b01968aa31cf8aea4e56a86d343184c891ab1640c1cc6b9f10e84a826245b not found: ID does not exist" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.422327 4911 scope.go:117] "RemoveContainer" containerID="2a5dffc331256b3351c63628c0a607f462efe380197a09b63a7440dac4760428" Sep 29 21:28:46 crc kubenswrapper[4911]: E0929 21:28:46.422813 4911 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2a5dffc331256b3351c63628c0a607f462efe380197a09b63a7440dac4760428\": container with ID starting with 2a5dffc331256b3351c63628c0a607f462efe380197a09b63a7440dac4760428 not found: ID does not exist" containerID="2a5dffc331256b3351c63628c0a607f462efe380197a09b63a7440dac4760428" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.422893 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a5dffc331256b3351c63628c0a607f462efe380197a09b63a7440dac4760428"} err="failed to get container status \"2a5dffc331256b3351c63628c0a607f462efe380197a09b63a7440dac4760428\": rpc error: code = NotFound desc = could not find container \"2a5dffc331256b3351c63628c0a607f462efe380197a09b63a7440dac4760428\": container with ID starting with 2a5dffc331256b3351c63628c0a607f462efe380197a09b63a7440dac4760428 not found: ID does not exist" Sep 29 21:28:46 crc kubenswrapper[4911]: I0929 21:28:46.708125 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5be9e2b9-d97c-40c1-9179-2703b878679b" path="/var/lib/kubelet/pods/5be9e2b9-d97c-40c1-9179-2703b878679b/volumes" Sep 29 21:28:55 crc kubenswrapper[4911]: I0929 21:28:55.211748 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:28:55 crc kubenswrapper[4911]: I0929 21:28:55.212728 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:28:55 crc kubenswrapper[4911]: I0929 21:28:55.212846 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:28:55 crc kubenswrapper[4911]: I0929 21:28:55.213969 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca"} pod="openshift-machine-config-operator/machine-config-daemon-w647f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 21:28:55 crc kubenswrapper[4911]: I0929 21:28:55.214074 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" containerID="cri-o://568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca" gracePeriod=600 Sep 29 21:28:55 crc kubenswrapper[4911]: I0929 21:28:55.441669 4911 generic.go:334] "Generic (PLEG): container finished" podID="50640abc-40db-4390-82d1-f3cfc76da71c" containerID="568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca" exitCode=0 Sep 29 21:28:55 crc kubenswrapper[4911]: I0929 21:28:55.441816 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" 
event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerDied","Data":"568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca"} Sep 29 21:28:56 crc kubenswrapper[4911]: I0929 21:28:56.452924 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"e38c1fe31b443189675002028c28967c8680009a80ff20447fa8074033d6557e"} Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.154462 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h9qcg"] Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.543363 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cz97x"] Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.544055 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cz97x" podUID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" containerName="registry-server" containerID="cri-o://120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d" gracePeriod=30 Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.550780 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qksj7"] Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.551168 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qksj7" podUID="2f3bc61e-17ba-4f89-b582-1d4efd6d7146" containerName="registry-server" containerID="cri-o://e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155" gracePeriod=30 Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.563314 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s8dhg"] Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.563592 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" podUID="615ffb8b-fb38-488c-b326-df6086017073" containerName="marketplace-operator" containerID="cri-o://6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603" gracePeriod=30 Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.571389 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxt4d"] Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.571685 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hxt4d" podUID="596b137a-5101-4a31-85b8-050945c1de9b" containerName="registry-server" containerID="cri-o://1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2" gracePeriod=30 Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.582415 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m9d9l"] Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.582622 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m9d9l" podUID="fd54ef5c-5d59-4326-aba3-ba1a915313c9" containerName="registry-server" containerID="cri-o://93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72" gracePeriod=30 Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.588840 4911 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-pbd48"] Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589096 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" containerName="extract-utilities" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589114 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" containerName="extract-utilities" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589127 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f3e163-1816-4969-8594-c1a11760793a" containerName="extract-utilities" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589137 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f3e163-1816-4969-8594-c1a11760793a" containerName="extract-utilities" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589144 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="587286a6-f2ab-423c-8569-9f9d2b103edf" containerName="pruner" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589151 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="587286a6-f2ab-423c-8569-9f9d2b103edf" containerName="pruner" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589159 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5be9e2b9-d97c-40c1-9179-2703b878679b" containerName="extract-utilities" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589167 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5be9e2b9-d97c-40c1-9179-2703b878679b" containerName="extract-utilities" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589174 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc47074-abdd-41ee-b5de-8c483fc77226" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589180 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="adc47074-abdd-41ee-b5de-8c483fc77226" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589191 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc47074-abdd-41ee-b5de-8c483fc77226" containerName="extract-content" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589197 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="adc47074-abdd-41ee-b5de-8c483fc77226" containerName="extract-content" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589207 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5be9e2b9-d97c-40c1-9179-2703b878679b" containerName="extract-content" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589213 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5be9e2b9-d97c-40c1-9179-2703b878679b" containerName="extract-content" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589224 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5bb17fe-bdd3-4550-86f9-6a75f6ae8801" containerName="pruner" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589231 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5bb17fe-bdd3-4550-86f9-6a75f6ae8801" containerName="pruner" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589239 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc47074-abdd-41ee-b5de-8c483fc77226" containerName="extract-utilities" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589248 4911 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="adc47074-abdd-41ee-b5de-8c483fc77226" containerName="extract-utilities" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589257 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589263 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589276 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f3e163-1816-4969-8594-c1a11760793a" containerName="extract-content" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589282 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f3e163-1816-4969-8594-c1a11760793a" containerName="extract-content" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589298 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f3e163-1816-4969-8594-c1a11760793a" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589303 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f3e163-1816-4969-8594-c1a11760793a" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589313 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5be9e2b9-d97c-40c1-9179-2703b878679b" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589319 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5be9e2b9-d97c-40c1-9179-2703b878679b" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.589330 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" containerName="extract-content" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589335 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" containerName="extract-content" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589423 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8f3e163-1816-4969-8594-c1a11760793a" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589434 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e95e31d7-3d9d-4ee6-b1e8-f1595e4e398e" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589441 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="adc47074-abdd-41ee-b5de-8c483fc77226" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589449 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="587286a6-f2ab-423c-8569-9f9d2b103edf" containerName="pruner" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589457 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5bb17fe-bdd3-4550-86f9-6a75f6ae8801" containerName="pruner" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.589466 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="5be9e2b9-d97c-40c1-9179-2703b878679b" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.592297 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.609934 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pbd48"] Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.705094 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7qnf\" (UniqueName: \"kubernetes.io/projected/8089b532-3c10-498a-9558-7b5d845d6c7e-kube-api-access-k7qnf\") pod \"marketplace-operator-79b997595-pbd48\" (UID: \"8089b532-3c10-498a-9558-7b5d845d6c7e\") " pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.705228 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8089b532-3c10-498a-9558-7b5d845d6c7e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pbd48\" (UID: \"8089b532-3c10-498a-9558-7b5d845d6c7e\") " pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.705411 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8089b532-3c10-498a-9558-7b5d845d6c7e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pbd48\" (UID: \"8089b532-3c10-498a-9558-7b5d845d6c7e\") " pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.809545 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8089b532-3c10-498a-9558-7b5d845d6c7e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pbd48\" (UID: \"8089b532-3c10-498a-9558-7b5d845d6c7e\") " pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.809620 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7qnf\" (UniqueName: \"kubernetes.io/projected/8089b532-3c10-498a-9558-7b5d845d6c7e-kube-api-access-k7qnf\") pod \"marketplace-operator-79b997595-pbd48\" (UID: \"8089b532-3c10-498a-9558-7b5d845d6c7e\") " pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.809655 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8089b532-3c10-498a-9558-7b5d845d6c7e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pbd48\" (UID: \"8089b532-3c10-498a-9558-7b5d845d6c7e\") " pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.812975 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8089b532-3c10-498a-9558-7b5d845d6c7e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pbd48\" (UID: \"8089b532-3c10-498a-9558-7b5d845d6c7e\") " pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.822199 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/8089b532-3c10-498a-9558-7b5d845d6c7e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pbd48\" (UID: \"8089b532-3c10-498a-9558-7b5d845d6c7e\") " pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.831756 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7qnf\" (UniqueName: \"kubernetes.io/projected/8089b532-3c10-498a-9558-7b5d845d6c7e-kube-api-access-k7qnf\") pod \"marketplace-operator-79b997595-pbd48\" (UID: \"8089b532-3c10-498a-9558-7b5d845d6c7e\") " pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.850477 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d is running failed: container process not found" containerID="120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d" cmd=["grpc_health_probe","-addr=:50051"] Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.850988 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d is running failed: container process not found" containerID="120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d" cmd=["grpc_health_probe","-addr=:50051"] Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.851274 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d is running failed: container process not found" containerID="120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d" cmd=["grpc_health_probe","-addr=:50051"] Sep 29 21:29:09 crc kubenswrapper[4911]: E0929 21:29:09.851319 4911 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-cz97x" podUID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" containerName="registry-server" Sep 29 21:29:09 crc kubenswrapper[4911]: I0929 21:29:09.987747 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.000431 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.007876 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hxt4d" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.038408 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m9d9l" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.047971 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.115405 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596b137a-5101-4a31-85b8-050945c1de9b-catalog-content\") pod \"596b137a-5101-4a31-85b8-050945c1de9b\" (UID: \"596b137a-5101-4a31-85b8-050945c1de9b\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.115467 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkznl\" (UniqueName: \"kubernetes.io/projected/596b137a-5101-4a31-85b8-050945c1de9b-kube-api-access-qkznl\") pod \"596b137a-5101-4a31-85b8-050945c1de9b\" (UID: \"596b137a-5101-4a31-85b8-050945c1de9b\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.115497 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xg5w7\" (UniqueName: \"kubernetes.io/projected/615ffb8b-fb38-488c-b326-df6086017073-kube-api-access-xg5w7\") pod \"615ffb8b-fb38-488c-b326-df6086017073\" (UID: \"615ffb8b-fb38-488c-b326-df6086017073\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.115617 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596b137a-5101-4a31-85b8-050945c1de9b-utilities\") pod \"596b137a-5101-4a31-85b8-050945c1de9b\" (UID: \"596b137a-5101-4a31-85b8-050945c1de9b\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.115663 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/615ffb8b-fb38-488c-b326-df6086017073-marketplace-trusted-ca\") pod \"615ffb8b-fb38-488c-b326-df6086017073\" (UID: \"615ffb8b-fb38-488c-b326-df6086017073\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.115697 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/615ffb8b-fb38-488c-b326-df6086017073-marketplace-operator-metrics\") pod \"615ffb8b-fb38-488c-b326-df6086017073\" (UID: \"615ffb8b-fb38-488c-b326-df6086017073\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.117881 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/615ffb8b-fb38-488c-b326-df6086017073-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "615ffb8b-fb38-488c-b326-df6086017073" (UID: "615ffb8b-fb38-488c-b326-df6086017073"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.120428 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/596b137a-5101-4a31-85b8-050945c1de9b-utilities" (OuterVolumeSpecName: "utilities") pod "596b137a-5101-4a31-85b8-050945c1de9b" (UID: "596b137a-5101-4a31-85b8-050945c1de9b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.127005 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/596b137a-5101-4a31-85b8-050945c1de9b-kube-api-access-qkznl" (OuterVolumeSpecName: "kube-api-access-qkznl") pod "596b137a-5101-4a31-85b8-050945c1de9b" (UID: "596b137a-5101-4a31-85b8-050945c1de9b"). 
InnerVolumeSpecName "kube-api-access-qkznl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.127219 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/615ffb8b-fb38-488c-b326-df6086017073-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "615ffb8b-fb38-488c-b326-df6086017073" (UID: "615ffb8b-fb38-488c-b326-df6086017073"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.127776 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/615ffb8b-fb38-488c-b326-df6086017073-kube-api-access-xg5w7" (OuterVolumeSpecName: "kube-api-access-xg5w7") pod "615ffb8b-fb38-488c-b326-df6086017073" (UID: "615ffb8b-fb38-488c-b326-df6086017073"). InnerVolumeSpecName "kube-api-access-xg5w7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.134460 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/596b137a-5101-4a31-85b8-050945c1de9b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "596b137a-5101-4a31-85b8-050945c1de9b" (UID: "596b137a-5101-4a31-85b8-050945c1de9b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.217111 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-catalog-content\") pod \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\" (UID: \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.217177 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-utilities\") pod \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\" (UID: \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.217213 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd54ef5c-5d59-4326-aba3-ba1a915313c9-utilities\") pod \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\" (UID: \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.217256 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd54ef5c-5d59-4326-aba3-ba1a915313c9-catalog-content\") pod \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\" (UID: \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.217288 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pl2l9\" (UniqueName: \"kubernetes.io/projected/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-kube-api-access-pl2l9\") pod \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\" (UID: \"2f3bc61e-17ba-4f89-b582-1d4efd6d7146\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.217321 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2p4v9\" (UniqueName: \"kubernetes.io/projected/fd54ef5c-5d59-4326-aba3-ba1a915313c9-kube-api-access-2p4v9\") pod 
\"fd54ef5c-5d59-4326-aba3-ba1a915313c9\" (UID: \"fd54ef5c-5d59-4326-aba3-ba1a915313c9\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.217576 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596b137a-5101-4a31-85b8-050945c1de9b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.217591 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkznl\" (UniqueName: \"kubernetes.io/projected/596b137a-5101-4a31-85b8-050945c1de9b-kube-api-access-qkznl\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.217602 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xg5w7\" (UniqueName: \"kubernetes.io/projected/615ffb8b-fb38-488c-b326-df6086017073-kube-api-access-xg5w7\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.217611 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596b137a-5101-4a31-85b8-050945c1de9b-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.217624 4911 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/615ffb8b-fb38-488c-b326-df6086017073-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.217633 4911 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/615ffb8b-fb38-488c-b326-df6086017073-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.219045 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd54ef5c-5d59-4326-aba3-ba1a915313c9-utilities" (OuterVolumeSpecName: "utilities") pod "fd54ef5c-5d59-4326-aba3-ba1a915313c9" (UID: "fd54ef5c-5d59-4326-aba3-ba1a915313c9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.220092 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-utilities" (OuterVolumeSpecName: "utilities") pod "2f3bc61e-17ba-4f89-b582-1d4efd6d7146" (UID: "2f3bc61e-17ba-4f89-b582-1d4efd6d7146"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.220530 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd54ef5c-5d59-4326-aba3-ba1a915313c9-kube-api-access-2p4v9" (OuterVolumeSpecName: "kube-api-access-2p4v9") pod "fd54ef5c-5d59-4326-aba3-ba1a915313c9" (UID: "fd54ef5c-5d59-4326-aba3-ba1a915313c9"). InnerVolumeSpecName "kube-api-access-2p4v9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.226529 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-kube-api-access-pl2l9" (OuterVolumeSpecName: "kube-api-access-pl2l9") pod "2f3bc61e-17ba-4f89-b582-1d4efd6d7146" (UID: "2f3bc61e-17ba-4f89-b582-1d4efd6d7146"). InnerVolumeSpecName "kube-api-access-pl2l9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.314314 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f3bc61e-17ba-4f89-b582-1d4efd6d7146" (UID: "2f3bc61e-17ba-4f89-b582-1d4efd6d7146"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.319711 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.319843 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.319869 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd54ef5c-5d59-4326-aba3-ba1a915313c9-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.319889 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pl2l9\" (UniqueName: \"kubernetes.io/projected/2f3bc61e-17ba-4f89-b582-1d4efd6d7146-kube-api-access-pl2l9\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.319917 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2p4v9\" (UniqueName: \"kubernetes.io/projected/fd54ef5c-5d59-4326-aba3-ba1a915313c9-kube-api-access-2p4v9\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.327690 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd54ef5c-5d59-4326-aba3-ba1a915313c9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd54ef5c-5d59-4326-aba3-ba1a915313c9" (UID: "fd54ef5c-5d59-4326-aba3-ba1a915313c9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.376989 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.426020 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd54ef5c-5d59-4326-aba3-ba1a915313c9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.451866 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pbd48"] Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.526503 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f7f281b-93f4-42fe-9996-2a5a4860ce88-catalog-content\") pod \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\" (UID: \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.526593 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kw74r\" (UniqueName: \"kubernetes.io/projected/4f7f281b-93f4-42fe-9996-2a5a4860ce88-kube-api-access-kw74r\") pod \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\" (UID: \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.526729 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f7f281b-93f4-42fe-9996-2a5a4860ce88-utilities\") pod \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\" (UID: \"4f7f281b-93f4-42fe-9996-2a5a4860ce88\") " Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.527932 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f7f281b-93f4-42fe-9996-2a5a4860ce88-utilities" (OuterVolumeSpecName: "utilities") pod "4f7f281b-93f4-42fe-9996-2a5a4860ce88" (UID: "4f7f281b-93f4-42fe-9996-2a5a4860ce88"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.531956 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f7f281b-93f4-42fe-9996-2a5a4860ce88-kube-api-access-kw74r" (OuterVolumeSpecName: "kube-api-access-kw74r") pod "4f7f281b-93f4-42fe-9996-2a5a4860ce88" (UID: "4f7f281b-93f4-42fe-9996-2a5a4860ce88"). InnerVolumeSpecName "kube-api-access-kw74r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.543055 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" event={"ID":"8089b532-3c10-498a-9558-7b5d845d6c7e","Type":"ContainerStarted","Data":"ae31edd8c649e0d473eb9156324e47ba9a665bbe289a3e2258a22768225e74ff"} Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.545473 4911 generic.go:334] "Generic (PLEG): container finished" podID="615ffb8b-fb38-488c-b326-df6086017073" containerID="6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603" exitCode=0 Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.545540 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" event={"ID":"615ffb8b-fb38-488c-b326-df6086017073","Type":"ContainerDied","Data":"6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603"} Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.545577 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" event={"ID":"615ffb8b-fb38-488c-b326-df6086017073","Type":"ContainerDied","Data":"f1e6abefb7fef8bb5582277656c9d95b0d3e3137b030efb3697a5d9d1c697388"} Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.545601 4911 scope.go:117] "RemoveContainer" containerID="6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.545718 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s8dhg" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.556309 4911 generic.go:334] "Generic (PLEG): container finished" podID="596b137a-5101-4a31-85b8-050945c1de9b" containerID="1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2" exitCode=0 Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.556932 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hxt4d" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.556976 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxt4d" event={"ID":"596b137a-5101-4a31-85b8-050945c1de9b","Type":"ContainerDied","Data":"1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2"} Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.557030 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxt4d" event={"ID":"596b137a-5101-4a31-85b8-050945c1de9b","Type":"ContainerDied","Data":"122a95699c8b7ad6c0a4f25d49c25b157055a28a10108321f27866606cfe29a2"} Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.563846 4911 generic.go:334] "Generic (PLEG): container finished" podID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" containerID="120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d" exitCode=0 Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.563941 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cz97x" event={"ID":"4f7f281b-93f4-42fe-9996-2a5a4860ce88","Type":"ContainerDied","Data":"120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d"} Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.563979 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cz97x" event={"ID":"4f7f281b-93f4-42fe-9996-2a5a4860ce88","Type":"ContainerDied","Data":"2676ad159071d6440f5ab5bd28fef5363c3beaf07610358d1d623a5ace283453"} Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.563994 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cz97x" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.569920 4911 generic.go:334] "Generic (PLEG): container finished" podID="2f3bc61e-17ba-4f89-b582-1d4efd6d7146" containerID="e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155" exitCode=0 Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.570004 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qksj7" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.570037 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qksj7" event={"ID":"2f3bc61e-17ba-4f89-b582-1d4efd6d7146","Type":"ContainerDied","Data":"e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155"} Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.570135 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qksj7" event={"ID":"2f3bc61e-17ba-4f89-b582-1d4efd6d7146","Type":"ContainerDied","Data":"571dc35a825eefea0984d93784f7ebc9cb512e0a4915c2a8ecd39eaccd1f6428"} Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.591851 4911 generic.go:334] "Generic (PLEG): container finished" podID="fd54ef5c-5d59-4326-aba3-ba1a915313c9" containerID="93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72" exitCode=0 Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.591914 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m9d9l" event={"ID":"fd54ef5c-5d59-4326-aba3-ba1a915313c9","Type":"ContainerDied","Data":"93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72"} Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.591978 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m9d9l" event={"ID":"fd54ef5c-5d59-4326-aba3-ba1a915313c9","Type":"ContainerDied","Data":"ea96b6f6921b59594e676c818f6009053dbc857b760af8ddb16e8346a6c0b1a1"} Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.592183 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m9d9l" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.603469 4911 scope.go:117] "RemoveContainer" containerID="6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603" Sep 29 21:29:10 crc kubenswrapper[4911]: E0929 21:29:10.618699 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603\": container with ID starting with 6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603 not found: ID does not exist" containerID="6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.618818 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603"} err="failed to get container status \"6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603\": rpc error: code = NotFound desc = could not find container \"6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603\": container with ID starting with 6b1da2c46960ab94bdd010db042ac3c495610e1966348fb29478628532259603 not found: ID does not exist" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.618862 4911 scope.go:117] "RemoveContainer" containerID="1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.625738 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s8dhg"] Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.628929 4911 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-kw74r\" (UniqueName: \"kubernetes.io/projected/4f7f281b-93f4-42fe-9996-2a5a4860ce88-kube-api-access-kw74r\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.628956 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f7f281b-93f4-42fe-9996-2a5a4860ce88-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.632089 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s8dhg"] Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.634382 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxt4d"] Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.636566 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxt4d"] Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.640961 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f7f281b-93f4-42fe-9996-2a5a4860ce88-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4f7f281b-93f4-42fe-9996-2a5a4860ce88" (UID: "4f7f281b-93f4-42fe-9996-2a5a4860ce88"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.654497 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qksj7"] Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.658123 4911 scope.go:117] "RemoveContainer" containerID="2e6f07385cf63d0494c2ff76f5c4a17f89332df599749aca3f223efbed594922" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.662559 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qksj7"] Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.671676 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m9d9l"] Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.673285 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m9d9l"] Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.709370 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f3bc61e-17ba-4f89-b582-1d4efd6d7146" path="/var/lib/kubelet/pods/2f3bc61e-17ba-4f89-b582-1d4efd6d7146/volumes" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.712259 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="596b137a-5101-4a31-85b8-050945c1de9b" path="/var/lib/kubelet/pods/596b137a-5101-4a31-85b8-050945c1de9b/volumes" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.713122 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="615ffb8b-fb38-488c-b326-df6086017073" path="/var/lib/kubelet/pods/615ffb8b-fb38-488c-b326-df6086017073/volumes" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.713622 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd54ef5c-5d59-4326-aba3-ba1a915313c9" path="/var/lib/kubelet/pods/fd54ef5c-5d59-4326-aba3-ba1a915313c9/volumes" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.715864 4911 scope.go:117] "RemoveContainer" containerID="e7a83d500e3f93100dc284099fae8792152446ece658fad56b9a50a879018cbe" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.729661 4911 reconciler_common.go:293] "Volume detached for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f7f281b-93f4-42fe-9996-2a5a4860ce88-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.732757 4911 scope.go:117] "RemoveContainer" containerID="1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2" Sep 29 21:29:10 crc kubenswrapper[4911]: E0929 21:29:10.733260 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2\": container with ID starting with 1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2 not found: ID does not exist" containerID="1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.733294 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2"} err="failed to get container status \"1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2\": rpc error: code = NotFound desc = could not find container \"1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2\": container with ID starting with 1b044fa87a9a6c81046846c12f98d0e91b26ce215d17b0f51930a6bec0c267a2 not found: ID does not exist" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.733319 4911 scope.go:117] "RemoveContainer" containerID="2e6f07385cf63d0494c2ff76f5c4a17f89332df599749aca3f223efbed594922" Sep 29 21:29:10 crc kubenswrapper[4911]: E0929 21:29:10.733687 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e6f07385cf63d0494c2ff76f5c4a17f89332df599749aca3f223efbed594922\": container with ID starting with 2e6f07385cf63d0494c2ff76f5c4a17f89332df599749aca3f223efbed594922 not found: ID does not exist" containerID="2e6f07385cf63d0494c2ff76f5c4a17f89332df599749aca3f223efbed594922" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.733769 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e6f07385cf63d0494c2ff76f5c4a17f89332df599749aca3f223efbed594922"} err="failed to get container status \"2e6f07385cf63d0494c2ff76f5c4a17f89332df599749aca3f223efbed594922\": rpc error: code = NotFound desc = could not find container \"2e6f07385cf63d0494c2ff76f5c4a17f89332df599749aca3f223efbed594922\": container with ID starting with 2e6f07385cf63d0494c2ff76f5c4a17f89332df599749aca3f223efbed594922 not found: ID does not exist" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.733813 4911 scope.go:117] "RemoveContainer" containerID="e7a83d500e3f93100dc284099fae8792152446ece658fad56b9a50a879018cbe" Sep 29 21:29:10 crc kubenswrapper[4911]: E0929 21:29:10.737016 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7a83d500e3f93100dc284099fae8792152446ece658fad56b9a50a879018cbe\": container with ID starting with e7a83d500e3f93100dc284099fae8792152446ece658fad56b9a50a879018cbe not found: ID does not exist" containerID="e7a83d500e3f93100dc284099fae8792152446ece658fad56b9a50a879018cbe" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.737075 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7a83d500e3f93100dc284099fae8792152446ece658fad56b9a50a879018cbe"} err="failed to get container status 
\"e7a83d500e3f93100dc284099fae8792152446ece658fad56b9a50a879018cbe\": rpc error: code = NotFound desc = could not find container \"e7a83d500e3f93100dc284099fae8792152446ece658fad56b9a50a879018cbe\": container with ID starting with e7a83d500e3f93100dc284099fae8792152446ece658fad56b9a50a879018cbe not found: ID does not exist" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.737116 4911 scope.go:117] "RemoveContainer" containerID="120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.750964 4911 scope.go:117] "RemoveContainer" containerID="eece27ba58678cf4b3b9de80942a6917996aef9947f33623a53e16aec1a0f6c4" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.785228 4911 scope.go:117] "RemoveContainer" containerID="c9bc50e5782112b86ed302a0fb42c047cb203268f58b6c3d66d93deb5e6fe3d5" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.810627 4911 scope.go:117] "RemoveContainer" containerID="120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d" Sep 29 21:29:10 crc kubenswrapper[4911]: E0929 21:29:10.811232 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d\": container with ID starting with 120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d not found: ID does not exist" containerID="120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.811275 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d"} err="failed to get container status \"120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d\": rpc error: code = NotFound desc = could not find container \"120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d\": container with ID starting with 120c1d0845d25dba4daeccf45261faf753dbb38bf6d14700cd5b36ac8a91c27d not found: ID does not exist" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.811308 4911 scope.go:117] "RemoveContainer" containerID="eece27ba58678cf4b3b9de80942a6917996aef9947f33623a53e16aec1a0f6c4" Sep 29 21:29:10 crc kubenswrapper[4911]: E0929 21:29:10.811628 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eece27ba58678cf4b3b9de80942a6917996aef9947f33623a53e16aec1a0f6c4\": container with ID starting with eece27ba58678cf4b3b9de80942a6917996aef9947f33623a53e16aec1a0f6c4 not found: ID does not exist" containerID="eece27ba58678cf4b3b9de80942a6917996aef9947f33623a53e16aec1a0f6c4" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.811682 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eece27ba58678cf4b3b9de80942a6917996aef9947f33623a53e16aec1a0f6c4"} err="failed to get container status \"eece27ba58678cf4b3b9de80942a6917996aef9947f33623a53e16aec1a0f6c4\": rpc error: code = NotFound desc = could not find container \"eece27ba58678cf4b3b9de80942a6917996aef9947f33623a53e16aec1a0f6c4\": container with ID starting with eece27ba58678cf4b3b9de80942a6917996aef9947f33623a53e16aec1a0f6c4 not found: ID does not exist" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.811745 4911 scope.go:117] "RemoveContainer" containerID="c9bc50e5782112b86ed302a0fb42c047cb203268f58b6c3d66d93deb5e6fe3d5" Sep 29 21:29:10 crc 
kubenswrapper[4911]: E0929 21:29:10.812265 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9bc50e5782112b86ed302a0fb42c047cb203268f58b6c3d66d93deb5e6fe3d5\": container with ID starting with c9bc50e5782112b86ed302a0fb42c047cb203268f58b6c3d66d93deb5e6fe3d5 not found: ID does not exist" containerID="c9bc50e5782112b86ed302a0fb42c047cb203268f58b6c3d66d93deb5e6fe3d5" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.812291 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9bc50e5782112b86ed302a0fb42c047cb203268f58b6c3d66d93deb5e6fe3d5"} err="failed to get container status \"c9bc50e5782112b86ed302a0fb42c047cb203268f58b6c3d66d93deb5e6fe3d5\": rpc error: code = NotFound desc = could not find container \"c9bc50e5782112b86ed302a0fb42c047cb203268f58b6c3d66d93deb5e6fe3d5\": container with ID starting with c9bc50e5782112b86ed302a0fb42c047cb203268f58b6c3d66d93deb5e6fe3d5 not found: ID does not exist" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.812306 4911 scope.go:117] "RemoveContainer" containerID="e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.825867 4911 scope.go:117] "RemoveContainer" containerID="52205d062c35d88243fd29424b6c69de67a0594053fd37da7be4f118a4031746" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.845370 4911 scope.go:117] "RemoveContainer" containerID="f6476bac94160453c95d099d6464c2395a4ae99d0b00c5efe805f5765798ff19" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.864390 4911 scope.go:117] "RemoveContainer" containerID="e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155" Sep 29 21:29:10 crc kubenswrapper[4911]: E0929 21:29:10.866328 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155\": container with ID starting with e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155 not found: ID does not exist" containerID="e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.866391 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155"} err="failed to get container status \"e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155\": rpc error: code = NotFound desc = could not find container \"e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155\": container with ID starting with e710e1ab6de81605b0a0c0eaeaa0f951f26d3bd35a1f1c42643dc2379e99e155 not found: ID does not exist" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.866448 4911 scope.go:117] "RemoveContainer" containerID="52205d062c35d88243fd29424b6c69de67a0594053fd37da7be4f118a4031746" Sep 29 21:29:10 crc kubenswrapper[4911]: E0929 21:29:10.866907 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52205d062c35d88243fd29424b6c69de67a0594053fd37da7be4f118a4031746\": container with ID starting with 52205d062c35d88243fd29424b6c69de67a0594053fd37da7be4f118a4031746 not found: ID does not exist" containerID="52205d062c35d88243fd29424b6c69de67a0594053fd37da7be4f118a4031746" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.867044 4911 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52205d062c35d88243fd29424b6c69de67a0594053fd37da7be4f118a4031746"} err="failed to get container status \"52205d062c35d88243fd29424b6c69de67a0594053fd37da7be4f118a4031746\": rpc error: code = NotFound desc = could not find container \"52205d062c35d88243fd29424b6c69de67a0594053fd37da7be4f118a4031746\": container with ID starting with 52205d062c35d88243fd29424b6c69de67a0594053fd37da7be4f118a4031746 not found: ID does not exist" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.867342 4911 scope.go:117] "RemoveContainer" containerID="f6476bac94160453c95d099d6464c2395a4ae99d0b00c5efe805f5765798ff19" Sep 29 21:29:10 crc kubenswrapper[4911]: E0929 21:29:10.867922 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6476bac94160453c95d099d6464c2395a4ae99d0b00c5efe805f5765798ff19\": container with ID starting with f6476bac94160453c95d099d6464c2395a4ae99d0b00c5efe805f5765798ff19 not found: ID does not exist" containerID="f6476bac94160453c95d099d6464c2395a4ae99d0b00c5efe805f5765798ff19" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.867959 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6476bac94160453c95d099d6464c2395a4ae99d0b00c5efe805f5765798ff19"} err="failed to get container status \"f6476bac94160453c95d099d6464c2395a4ae99d0b00c5efe805f5765798ff19\": rpc error: code = NotFound desc = could not find container \"f6476bac94160453c95d099d6464c2395a4ae99d0b00c5efe805f5765798ff19\": container with ID starting with f6476bac94160453c95d099d6464c2395a4ae99d0b00c5efe805f5765798ff19 not found: ID does not exist" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.867985 4911 scope.go:117] "RemoveContainer" containerID="93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.880852 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cz97x"] Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.883129 4911 scope.go:117] "RemoveContainer" containerID="7c20063bc4259927b13d11b2f0ff67b06a13aa260f23849b1f7e67da7e4e47fa" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.888224 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cz97x"] Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.902936 4911 scope.go:117] "RemoveContainer" containerID="40dafd89bfc7fd90dd5ce34462dce3535d4301ec7218f92f5f30bf27af94cbdb" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.919143 4911 scope.go:117] "RemoveContainer" containerID="93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72" Sep 29 21:29:10 crc kubenswrapper[4911]: E0929 21:29:10.919766 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72\": container with ID starting with 93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72 not found: ID does not exist" containerID="93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.919854 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72"} err="failed to get container status 
\"93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72\": rpc error: code = NotFound desc = could not find container \"93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72\": container with ID starting with 93c28e9ee4e904641fc45aff85da17b17330556461b41fdb193063169f31ee72 not found: ID does not exist" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.919903 4911 scope.go:117] "RemoveContainer" containerID="7c20063bc4259927b13d11b2f0ff67b06a13aa260f23849b1f7e67da7e4e47fa" Sep 29 21:29:10 crc kubenswrapper[4911]: E0929 21:29:10.920565 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c20063bc4259927b13d11b2f0ff67b06a13aa260f23849b1f7e67da7e4e47fa\": container with ID starting with 7c20063bc4259927b13d11b2f0ff67b06a13aa260f23849b1f7e67da7e4e47fa not found: ID does not exist" containerID="7c20063bc4259927b13d11b2f0ff67b06a13aa260f23849b1f7e67da7e4e47fa" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.920602 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c20063bc4259927b13d11b2f0ff67b06a13aa260f23849b1f7e67da7e4e47fa"} err="failed to get container status \"7c20063bc4259927b13d11b2f0ff67b06a13aa260f23849b1f7e67da7e4e47fa\": rpc error: code = NotFound desc = could not find container \"7c20063bc4259927b13d11b2f0ff67b06a13aa260f23849b1f7e67da7e4e47fa\": container with ID starting with 7c20063bc4259927b13d11b2f0ff67b06a13aa260f23849b1f7e67da7e4e47fa not found: ID does not exist" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.920623 4911 scope.go:117] "RemoveContainer" containerID="40dafd89bfc7fd90dd5ce34462dce3535d4301ec7218f92f5f30bf27af94cbdb" Sep 29 21:29:10 crc kubenswrapper[4911]: E0929 21:29:10.921220 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40dafd89bfc7fd90dd5ce34462dce3535d4301ec7218f92f5f30bf27af94cbdb\": container with ID starting with 40dafd89bfc7fd90dd5ce34462dce3535d4301ec7218f92f5f30bf27af94cbdb not found: ID does not exist" containerID="40dafd89bfc7fd90dd5ce34462dce3535d4301ec7218f92f5f30bf27af94cbdb" Sep 29 21:29:10 crc kubenswrapper[4911]: I0929 21:29:10.921266 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40dafd89bfc7fd90dd5ce34462dce3535d4301ec7218f92f5f30bf27af94cbdb"} err="failed to get container status \"40dafd89bfc7fd90dd5ce34462dce3535d4301ec7218f92f5f30bf27af94cbdb\": rpc error: code = NotFound desc = could not find container \"40dafd89bfc7fd90dd5ce34462dce3535d4301ec7218f92f5f30bf27af94cbdb\": container with ID starting with 40dafd89bfc7fd90dd5ce34462dce3535d4301ec7218f92f5f30bf27af94cbdb not found: ID does not exist" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.603513 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" event={"ID":"8089b532-3c10-498a-9558-7b5d845d6c7e","Type":"ContainerStarted","Data":"10137d8fc61dd08e15dd75db3d0fc911c0c79e1246f56e506f7a13d75c318128"} Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.604203 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.608239 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" Sep 29 21:29:11 crc 
kubenswrapper[4911]: I0929 21:29:11.624230 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-pbd48" podStartSLOduration=2.624200845 podStartE2EDuration="2.624200845s" podCreationTimestamp="2025-09-29 21:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:29:11.620276301 +0000 UTC m=+229.597388992" watchObservedRunningTime="2025-09-29 21:29:11.624200845 +0000 UTC m=+229.601313506" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.769467 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rm59v"] Sep 29 21:29:11 crc kubenswrapper[4911]: E0929 21:29:11.769827 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="615ffb8b-fb38-488c-b326-df6086017073" containerName="marketplace-operator" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.769843 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="615ffb8b-fb38-488c-b326-df6086017073" containerName="marketplace-operator" Sep 29 21:29:11 crc kubenswrapper[4911]: E0929 21:29:11.769855 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596b137a-5101-4a31-85b8-050945c1de9b" containerName="extract-content" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.769862 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="596b137a-5101-4a31-85b8-050945c1de9b" containerName="extract-content" Sep 29 21:29:11 crc kubenswrapper[4911]: E0929 21:29:11.769872 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596b137a-5101-4a31-85b8-050945c1de9b" containerName="registry-server" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.769877 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="596b137a-5101-4a31-85b8-050945c1de9b" containerName="registry-server" Sep 29 21:29:11 crc kubenswrapper[4911]: E0929 21:29:11.769889 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd54ef5c-5d59-4326-aba3-ba1a915313c9" containerName="registry-server" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.769895 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd54ef5c-5d59-4326-aba3-ba1a915313c9" containerName="registry-server" Sep 29 21:29:11 crc kubenswrapper[4911]: E0929 21:29:11.769905 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd54ef5c-5d59-4326-aba3-ba1a915313c9" containerName="extract-utilities" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.769912 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd54ef5c-5d59-4326-aba3-ba1a915313c9" containerName="extract-utilities" Sep 29 21:29:11 crc kubenswrapper[4911]: E0929 21:29:11.769923 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596b137a-5101-4a31-85b8-050945c1de9b" containerName="extract-utilities" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.769931 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="596b137a-5101-4a31-85b8-050945c1de9b" containerName="extract-utilities" Sep 29 21:29:11 crc kubenswrapper[4911]: E0929 21:29:11.769942 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f3bc61e-17ba-4f89-b582-1d4efd6d7146" containerName="extract-content" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.769949 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f3bc61e-17ba-4f89-b582-1d4efd6d7146" containerName="extract-content" Sep 29 21:29:11 crc 
kubenswrapper[4911]: E0929 21:29:11.769956 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f3bc61e-17ba-4f89-b582-1d4efd6d7146" containerName="registry-server" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.769963 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f3bc61e-17ba-4f89-b582-1d4efd6d7146" containerName="registry-server" Sep 29 21:29:11 crc kubenswrapper[4911]: E0929 21:29:11.769973 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" containerName="registry-server" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.769978 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" containerName="registry-server" Sep 29 21:29:11 crc kubenswrapper[4911]: E0929 21:29:11.769987 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" containerName="extract-utilities" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.769994 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" containerName="extract-utilities" Sep 29 21:29:11 crc kubenswrapper[4911]: E0929 21:29:11.770003 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" containerName="extract-content" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.770008 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" containerName="extract-content" Sep 29 21:29:11 crc kubenswrapper[4911]: E0929 21:29:11.770018 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd54ef5c-5d59-4326-aba3-ba1a915313c9" containerName="extract-content" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.770024 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd54ef5c-5d59-4326-aba3-ba1a915313c9" containerName="extract-content" Sep 29 21:29:11 crc kubenswrapper[4911]: E0929 21:29:11.770036 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f3bc61e-17ba-4f89-b582-1d4efd6d7146" containerName="extract-utilities" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.770043 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f3bc61e-17ba-4f89-b582-1d4efd6d7146" containerName="extract-utilities" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.770141 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="596b137a-5101-4a31-85b8-050945c1de9b" containerName="registry-server" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.770150 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="615ffb8b-fb38-488c-b326-df6086017073" containerName="marketplace-operator" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.770161 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd54ef5c-5d59-4326-aba3-ba1a915313c9" containerName="registry-server" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.770174 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" containerName="registry-server" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.770184 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f3bc61e-17ba-4f89-b582-1d4efd6d7146" containerName="registry-server" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.771068 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rm59v" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.772900 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.784968 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rm59v"] Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.850966 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klxrg\" (UniqueName: \"kubernetes.io/projected/61bf2025-9b8f-4cbb-8667-c58e05bb8706-kube-api-access-klxrg\") pod \"redhat-marketplace-rm59v\" (UID: \"61bf2025-9b8f-4cbb-8667-c58e05bb8706\") " pod="openshift-marketplace/redhat-marketplace-rm59v" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.851046 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61bf2025-9b8f-4cbb-8667-c58e05bb8706-utilities\") pod \"redhat-marketplace-rm59v\" (UID: \"61bf2025-9b8f-4cbb-8667-c58e05bb8706\") " pod="openshift-marketplace/redhat-marketplace-rm59v" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.851095 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61bf2025-9b8f-4cbb-8667-c58e05bb8706-catalog-content\") pod \"redhat-marketplace-rm59v\" (UID: \"61bf2025-9b8f-4cbb-8667-c58e05bb8706\") " pod="openshift-marketplace/redhat-marketplace-rm59v" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.951954 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61bf2025-9b8f-4cbb-8667-c58e05bb8706-catalog-content\") pod \"redhat-marketplace-rm59v\" (UID: \"61bf2025-9b8f-4cbb-8667-c58e05bb8706\") " pod="openshift-marketplace/redhat-marketplace-rm59v" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.952020 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klxrg\" (UniqueName: \"kubernetes.io/projected/61bf2025-9b8f-4cbb-8667-c58e05bb8706-kube-api-access-klxrg\") pod \"redhat-marketplace-rm59v\" (UID: \"61bf2025-9b8f-4cbb-8667-c58e05bb8706\") " pod="openshift-marketplace/redhat-marketplace-rm59v" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.952064 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61bf2025-9b8f-4cbb-8667-c58e05bb8706-utilities\") pod \"redhat-marketplace-rm59v\" (UID: \"61bf2025-9b8f-4cbb-8667-c58e05bb8706\") " pod="openshift-marketplace/redhat-marketplace-rm59v" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.952608 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61bf2025-9b8f-4cbb-8667-c58e05bb8706-utilities\") pod \"redhat-marketplace-rm59v\" (UID: \"61bf2025-9b8f-4cbb-8667-c58e05bb8706\") " pod="openshift-marketplace/redhat-marketplace-rm59v" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.953316 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61bf2025-9b8f-4cbb-8667-c58e05bb8706-catalog-content\") pod \"redhat-marketplace-rm59v\" (UID: 
\"61bf2025-9b8f-4cbb-8667-c58e05bb8706\") " pod="openshift-marketplace/redhat-marketplace-rm59v" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.964409 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7578h"] Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.966029 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7578h" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.971558 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.975883 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7578h"] Sep 29 21:29:11 crc kubenswrapper[4911]: I0929 21:29:11.995043 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klxrg\" (UniqueName: \"kubernetes.io/projected/61bf2025-9b8f-4cbb-8667-c58e05bb8706-kube-api-access-klxrg\") pod \"redhat-marketplace-rm59v\" (UID: \"61bf2025-9b8f-4cbb-8667-c58e05bb8706\") " pod="openshift-marketplace/redhat-marketplace-rm59v" Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.053466 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2whlq\" (UniqueName: \"kubernetes.io/projected/d3e76ef6-306e-4953-a379-367d277b9db4-kube-api-access-2whlq\") pod \"redhat-operators-7578h\" (UID: \"d3e76ef6-306e-4953-a379-367d277b9db4\") " pod="openshift-marketplace/redhat-operators-7578h" Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.053559 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3e76ef6-306e-4953-a379-367d277b9db4-utilities\") pod \"redhat-operators-7578h\" (UID: \"d3e76ef6-306e-4953-a379-367d277b9db4\") " pod="openshift-marketplace/redhat-operators-7578h" Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.053593 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3e76ef6-306e-4953-a379-367d277b9db4-catalog-content\") pod \"redhat-operators-7578h\" (UID: \"d3e76ef6-306e-4953-a379-367d277b9db4\") " pod="openshift-marketplace/redhat-operators-7578h" Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.105660 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rm59v" Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.156534 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2whlq\" (UniqueName: \"kubernetes.io/projected/d3e76ef6-306e-4953-a379-367d277b9db4-kube-api-access-2whlq\") pod \"redhat-operators-7578h\" (UID: \"d3e76ef6-306e-4953-a379-367d277b9db4\") " pod="openshift-marketplace/redhat-operators-7578h" Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.156781 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3e76ef6-306e-4953-a379-367d277b9db4-utilities\") pod \"redhat-operators-7578h\" (UID: \"d3e76ef6-306e-4953-a379-367d277b9db4\") " pod="openshift-marketplace/redhat-operators-7578h" Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.158126 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3e76ef6-306e-4953-a379-367d277b9db4-catalog-content\") pod \"redhat-operators-7578h\" (UID: \"d3e76ef6-306e-4953-a379-367d277b9db4\") " pod="openshift-marketplace/redhat-operators-7578h" Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.161206 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3e76ef6-306e-4953-a379-367d277b9db4-catalog-content\") pod \"redhat-operators-7578h\" (UID: \"d3e76ef6-306e-4953-a379-367d277b9db4\") " pod="openshift-marketplace/redhat-operators-7578h" Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.161576 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3e76ef6-306e-4953-a379-367d277b9db4-utilities\") pod \"redhat-operators-7578h\" (UID: \"d3e76ef6-306e-4953-a379-367d277b9db4\") " pod="openshift-marketplace/redhat-operators-7578h" Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.179955 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2whlq\" (UniqueName: \"kubernetes.io/projected/d3e76ef6-306e-4953-a379-367d277b9db4-kube-api-access-2whlq\") pod \"redhat-operators-7578h\" (UID: \"d3e76ef6-306e-4953-a379-367d277b9db4\") " pod="openshift-marketplace/redhat-operators-7578h" Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.292601 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7578h" Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.350464 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rm59v"] Sep 29 21:29:12 crc kubenswrapper[4911]: W0929 21:29:12.356558 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61bf2025_9b8f_4cbb_8667_c58e05bb8706.slice/crio-d409462000f1ab448ccc9f300968049aac6e43780f610708c8565503bae6b59d WatchSource:0}: Error finding container d409462000f1ab448ccc9f300968049aac6e43780f610708c8565503bae6b59d: Status 404 returned error can't find the container with id d409462000f1ab448ccc9f300968049aac6e43780f610708c8565503bae6b59d Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.495803 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7578h"] Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.619823 4911 generic.go:334] "Generic (PLEG): container finished" podID="61bf2025-9b8f-4cbb-8667-c58e05bb8706" containerID="20a4782e8feb7df9951cab043aa8012b5b918134d1e0a8d55fd7ab2d5c864ca6" exitCode=0 Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.620264 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rm59v" event={"ID":"61bf2025-9b8f-4cbb-8667-c58e05bb8706","Type":"ContainerDied","Data":"20a4782e8feb7df9951cab043aa8012b5b918134d1e0a8d55fd7ab2d5c864ca6"} Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.620322 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rm59v" event={"ID":"61bf2025-9b8f-4cbb-8667-c58e05bb8706","Type":"ContainerStarted","Data":"d409462000f1ab448ccc9f300968049aac6e43780f610708c8565503bae6b59d"} Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.624071 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7578h" event={"ID":"d3e76ef6-306e-4953-a379-367d277b9db4","Type":"ContainerStarted","Data":"e9ea47783798c0c99dfd9da2247988675ab952ec9241dc0b863bfb96549c336d"} Sep 29 21:29:12 crc kubenswrapper[4911]: I0929 21:29:12.708010 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f7f281b-93f4-42fe-9996-2a5a4860ce88" path="/var/lib/kubelet/pods/4f7f281b-93f4-42fe-9996-2a5a4860ce88/volumes" Sep 29 21:29:13 crc kubenswrapper[4911]: I0929 21:29:13.631847 4911 generic.go:334] "Generic (PLEG): container finished" podID="d3e76ef6-306e-4953-a379-367d277b9db4" containerID="7497dbd775be3209fe914be43a0439720cec3f32bbcd483e6f584ee8126a3eab" exitCode=0 Sep 29 21:29:13 crc kubenswrapper[4911]: I0929 21:29:13.631931 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7578h" event={"ID":"d3e76ef6-306e-4953-a379-367d277b9db4","Type":"ContainerDied","Data":"7497dbd775be3209fe914be43a0439720cec3f32bbcd483e6f584ee8126a3eab"} Sep 29 21:29:13 crc kubenswrapper[4911]: I0929 21:29:13.635410 4911 generic.go:334] "Generic (PLEG): container finished" podID="61bf2025-9b8f-4cbb-8667-c58e05bb8706" containerID="5966ac6e376fbb836b47f97f945593d23b39972f8c250f3c88607c7610bafe77" exitCode=0 Sep 29 21:29:13 crc kubenswrapper[4911]: I0929 21:29:13.635472 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rm59v" 
event={"ID":"61bf2025-9b8f-4cbb-8667-c58e05bb8706","Type":"ContainerDied","Data":"5966ac6e376fbb836b47f97f945593d23b39972f8c250f3c88607c7610bafe77"}
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.167707 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wxv47"]
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.170192 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.172137 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.186145 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wxv47"]
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.295942 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2681ca4-c395-47c2-8145-a1da21e6f46f-utilities\") pod \"community-operators-wxv47\" (UID: \"d2681ca4-c395-47c2-8145-a1da21e6f46f\") " pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.296549 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5cbc\" (UniqueName: \"kubernetes.io/projected/d2681ca4-c395-47c2-8145-a1da21e6f46f-kube-api-access-r5cbc\") pod \"community-operators-wxv47\" (UID: \"d2681ca4-c395-47c2-8145-a1da21e6f46f\") " pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.296670 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2681ca4-c395-47c2-8145-a1da21e6f46f-catalog-content\") pod \"community-operators-wxv47\" (UID: \"d2681ca4-c395-47c2-8145-a1da21e6f46f\") " pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.362155 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-59bsf"]
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.363232 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.366066 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.380136 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-59bsf"]
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.397903 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2681ca4-c395-47c2-8145-a1da21e6f46f-utilities\") pod \"community-operators-wxv47\" (UID: \"d2681ca4-c395-47c2-8145-a1da21e6f46f\") " pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.397980 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5cbc\" (UniqueName: \"kubernetes.io/projected/d2681ca4-c395-47c2-8145-a1da21e6f46f-kube-api-access-r5cbc\") pod \"community-operators-wxv47\" (UID: \"d2681ca4-c395-47c2-8145-a1da21e6f46f\") " pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.398012 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2681ca4-c395-47c2-8145-a1da21e6f46f-catalog-content\") pod \"community-operators-wxv47\" (UID: \"d2681ca4-c395-47c2-8145-a1da21e6f46f\") " pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.398477 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2681ca4-c395-47c2-8145-a1da21e6f46f-catalog-content\") pod \"community-operators-wxv47\" (UID: \"d2681ca4-c395-47c2-8145-a1da21e6f46f\") " pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.398696 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2681ca4-c395-47c2-8145-a1da21e6f46f-utilities\") pod \"community-operators-wxv47\" (UID: \"d2681ca4-c395-47c2-8145-a1da21e6f46f\") " pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.421154 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5cbc\" (UniqueName: \"kubernetes.io/projected/d2681ca4-c395-47c2-8145-a1da21e6f46f-kube-api-access-r5cbc\") pod \"community-operators-wxv47\" (UID: \"d2681ca4-c395-47c2-8145-a1da21e6f46f\") " pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.500244 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/053ef77e-10da-46b9-bc85-77d52f64b576-catalog-content\") pod \"certified-operators-59bsf\" (UID: \"053ef77e-10da-46b9-bc85-77d52f64b576\") " pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.500312 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkqmz\" (UniqueName: \"kubernetes.io/projected/053ef77e-10da-46b9-bc85-77d52f64b576-kube-api-access-jkqmz\") pod \"certified-operators-59bsf\" (UID: \"053ef77e-10da-46b9-bc85-77d52f64b576\") " pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.500343 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/053ef77e-10da-46b9-bc85-77d52f64b576-utilities\") pod \"certified-operators-59bsf\" (UID: \"053ef77e-10da-46b9-bc85-77d52f64b576\") " pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.543681 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.600941 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkqmz\" (UniqueName: \"kubernetes.io/projected/053ef77e-10da-46b9-bc85-77d52f64b576-kube-api-access-jkqmz\") pod \"certified-operators-59bsf\" (UID: \"053ef77e-10da-46b9-bc85-77d52f64b576\") " pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.601781 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/053ef77e-10da-46b9-bc85-77d52f64b576-utilities\") pod \"certified-operators-59bsf\" (UID: \"053ef77e-10da-46b9-bc85-77d52f64b576\") " pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.602150 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/053ef77e-10da-46b9-bc85-77d52f64b576-catalog-content\") pod \"certified-operators-59bsf\" (UID: \"053ef77e-10da-46b9-bc85-77d52f64b576\") " pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.602517 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/053ef77e-10da-46b9-bc85-77d52f64b576-utilities\") pod \"certified-operators-59bsf\" (UID: \"053ef77e-10da-46b9-bc85-77d52f64b576\") " pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.602705 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/053ef77e-10da-46b9-bc85-77d52f64b576-catalog-content\") pod \"certified-operators-59bsf\" (UID: \"053ef77e-10da-46b9-bc85-77d52f64b576\") " pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.620821 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkqmz\" (UniqueName: \"kubernetes.io/projected/053ef77e-10da-46b9-bc85-77d52f64b576-kube-api-access-jkqmz\") pod \"certified-operators-59bsf\" (UID: \"053ef77e-10da-46b9-bc85-77d52f64b576\") " pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.643731 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rm59v" event={"ID":"61bf2025-9b8f-4cbb-8667-c58e05bb8706","Type":"ContainerStarted","Data":"49be46897f11ae81349637299724d17f03d602005b099b9ddc3ae0943b7f5317"}
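
Each volume above moves through the same three reconciler phases: "operationExecutor.VerifyControllerAttachedVolume started", "operationExecutor.MountVolume started", and "MountVolume.SetUp succeeded". A minimal Go sketch (not kubelet code; the log path "kubelet.log" and the volume name "utilities" are assumptions) that follows one volume through those phases in a log like this and prints the elapsed time:

package main

// Follow one volume through the reconciler phases seen above and report how
// long verify-to-mount took. Illustrative only.

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"
	"time"
)

func main() {
	const volume = `volume \"utilities\"` // quotes appear escaped in the log
	tsRe := regexp.MustCompile(`[IWE](\d{4} \d{2}:\d{2}:\d{2}\.\d{6})`)
	phases := []string{
		"VerifyControllerAttachedVolume started",
		"MountVolume started",
		"MountVolume.SetUp succeeded",
	}
	seen := make(map[string]time.Time)

	f, err := os.Open("kubelet.log") // assumed path
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()

	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1<<20), 1<<20) // records can be very long
	for sc.Scan() {
		line := sc.Text()
		if !strings.Contains(line, volume) {
			continue
		}
		m := tsRe.FindStringSubmatch(line)
		if m == nil {
			continue
		}
		// The klog prefix carries no year; parsed times still subtract cleanly.
		t, err := time.Parse("0102 15:04:05.000000", m[1])
		if err != nil {
			continue
		}
		for _, p := range phases {
			if strings.Contains(line, p) && seen[p].IsZero() {
				seen[p] = t
				fmt.Printf("%-42s %s\n", p, m[1])
			}
		}
	}
	if !seen[phases[0]].IsZero() && !seen[phases[2]].IsZero() {
		fmt.Println("verify -> mounted:", seen[phases[2]].Sub(seen[phases[0]]))
	}
}

For the "utilities" volume of community-operators-wxv47 above, that span runs from 21:29:14.295942 to 21:29:14.398696, roughly 103ms.
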
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.651400 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7578h" event={"ID":"d3e76ef6-306e-4953-a379-367d277b9db4","Type":"ContainerStarted","Data":"1ebb92d289c0c485d3da85f8769945cf1a6acebd958aa4e543fc3fdd7202ea4e"}
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.674361 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rm59v" podStartSLOduration=2.159521702 podStartE2EDuration="3.674329848s" podCreationTimestamp="2025-09-29 21:29:11 +0000 UTC" firstStartedPulling="2025-09-29 21:29:12.624349855 +0000 UTC m=+230.601462526" lastFinishedPulling="2025-09-29 21:29:14.139158001 +0000 UTC m=+232.116270672" observedRunningTime="2025-09-29 21:29:14.668386738 +0000 UTC m=+232.645499419" watchObservedRunningTime="2025-09-29 21:29:14.674329848 +0000 UTC m=+232.651442519"
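
The "Observed pod startup duration" record above carries its own arithmetic: the SLO duration is the end-to-end startup time minus the image-pull window, which the m=+ offsets (seconds since kubelet start) make easy to check. A quick reproduction with the values from the redhat-marketplace-rm59v record:

package main

// Check: podStartSLOduration = podStartE2EDuration - (lastFinishedPulling - firstStartedPulling),
// using the m=+ monotonic offsets from the record above.

import "fmt"

func main() {
	const (
		e2e                 = 3.674329848   // podStartE2EDuration, seconds
		firstStartedPulling = 230.601462526 // m=+ offset, seconds
		lastFinishedPulling = 232.116270672 // m=+ offset, seconds
	)
	pull := lastFinishedPulling - firstStartedPulling
	fmt.Printf("pull window: %.9fs\n", pull)     // 1.514808146s
	fmt.Printf("e2e - pull:  %.9fs\n", e2e-pull) // 2.159521702s, the logged podStartSLOduration
}
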
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.685529 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:14 crc kubenswrapper[4911]: I0929 21:29:14.992878 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wxv47"]
Sep 29 21:29:15 crc kubenswrapper[4911]: W0929 21:29:15.009178 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2681ca4_c395_47c2_8145_a1da21e6f46f.slice/crio-221809a4bc7da5275e3937474715d20b217cb867ae3bfd1b0e72afac874950db WatchSource:0}: Error finding container 221809a4bc7da5275e3937474715d20b217cb867ae3bfd1b0e72afac874950db: Status 404 returned error can't find the container with id 221809a4bc7da5275e3937474715d20b217cb867ae3bfd1b0e72afac874950db
Sep 29 21:29:15 crc kubenswrapper[4911]: I0929 21:29:15.136802 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-59bsf"]
Sep 29 21:29:15 crc kubenswrapper[4911]: I0929 21:29:15.658952 4911 generic.go:334] "Generic (PLEG): container finished" podID="d3e76ef6-306e-4953-a379-367d277b9db4" containerID="1ebb92d289c0c485d3da85f8769945cf1a6acebd958aa4e543fc3fdd7202ea4e" exitCode=0
Sep 29 21:29:15 crc kubenswrapper[4911]: I0929 21:29:15.659067 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7578h" event={"ID":"d3e76ef6-306e-4953-a379-367d277b9db4","Type":"ContainerDied","Data":"1ebb92d289c0c485d3da85f8769945cf1a6acebd958aa4e543fc3fdd7202ea4e"}
Sep 29 21:29:15 crc kubenswrapper[4911]: I0929 21:29:15.660707 4911 generic.go:334] "Generic (PLEG): container finished" podID="d2681ca4-c395-47c2-8145-a1da21e6f46f" containerID="37a27cf2f5a8f007b746c7ec74e3c5b91c91f9a6f94e09e881b5e44452320ef1" exitCode=0
Sep 29 21:29:15 crc kubenswrapper[4911]: I0929 21:29:15.660779 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wxv47" event={"ID":"d2681ca4-c395-47c2-8145-a1da21e6f46f","Type":"ContainerDied","Data":"37a27cf2f5a8f007b746c7ec74e3c5b91c91f9a6f94e09e881b5e44452320ef1"}
Sep 29 21:29:15 crc kubenswrapper[4911]: I0929 21:29:15.660839 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wxv47" event={"ID":"d2681ca4-c395-47c2-8145-a1da21e6f46f","Type":"ContainerStarted","Data":"221809a4bc7da5275e3937474715d20b217cb867ae3bfd1b0e72afac874950db"}
Sep 29 21:29:15 crc kubenswrapper[4911]: I0929 21:29:15.664083 4911 generic.go:334] "Generic (PLEG): container finished" podID="053ef77e-10da-46b9-bc85-77d52f64b576" containerID="45962738817ed5e76af311ce8dd5eda9f119c5ab69e6cd4b3b019a71f7730523" exitCode=0
Sep 29 21:29:15 crc kubenswrapper[4911]: I0929 21:29:15.665658 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-59bsf" event={"ID":"053ef77e-10da-46b9-bc85-77d52f64b576","Type":"ContainerDied","Data":"45962738817ed5e76af311ce8dd5eda9f119c5ab69e6cd4b3b019a71f7730523"}
Sep 29 21:29:15 crc kubenswrapper[4911]: I0929 21:29:15.665694 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-59bsf" event={"ID":"053ef77e-10da-46b9-bc85-77d52f64b576","Type":"ContainerStarted","Data":"6b02c9128f8147c9529e2a58d9b6718985e29936639a1c1692bca0f9dbafdc15"}
Sep 29 21:29:16 crc kubenswrapper[4911]: I0929 21:29:16.673914 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wxv47" event={"ID":"d2681ca4-c395-47c2-8145-a1da21e6f46f","Type":"ContainerStarted","Data":"864f95e2a21121c544984996b9b1aaa0dbb12ab9e2640037478aff995d9386a6"}
Sep 29 21:29:16 crc kubenswrapper[4911]: I0929 21:29:16.677963 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-59bsf" event={"ID":"053ef77e-10da-46b9-bc85-77d52f64b576","Type":"ContainerStarted","Data":"957412a4b876274eab6314ccb249853469a0d09155e5b04c69c34f04ec5f7da3"}
Sep 29 21:29:16 crc kubenswrapper[4911]: I0929 21:29:16.681758 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7578h" event={"ID":"d3e76ef6-306e-4953-a379-367d277b9db4","Type":"ContainerStarted","Data":"a22807b0dc6e01170443b31cc6f95fab5996f4cf5b56753194136898448cb582"}
Sep 29 21:29:16 crc kubenswrapper[4911]: I0929 21:29:16.754233 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7578h" podStartSLOduration=3.332225972 podStartE2EDuration="5.75421074s" podCreationTimestamp="2025-09-29 21:29:11 +0000 UTC" firstStartedPulling="2025-09-29 21:29:13.634405911 +0000 UTC m=+231.611518622" lastFinishedPulling="2025-09-29 21:29:16.056390719 +0000 UTC m=+234.033503390" observedRunningTime="2025-09-29 21:29:16.75139363 +0000 UTC m=+234.728506301" watchObservedRunningTime="2025-09-29 21:29:16.75421074 +0000 UTC m=+234.731323411"
Sep 29 21:29:17 crc kubenswrapper[4911]: I0929 21:29:17.691101 4911 generic.go:334] "Generic (PLEG): container finished" podID="d2681ca4-c395-47c2-8145-a1da21e6f46f" containerID="864f95e2a21121c544984996b9b1aaa0dbb12ab9e2640037478aff995d9386a6" exitCode=0
Sep 29 21:29:17 crc kubenswrapper[4911]: I0929 21:29:17.691199 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wxv47" event={"ID":"d2681ca4-c395-47c2-8145-a1da21e6f46f","Type":"ContainerDied","Data":"864f95e2a21121c544984996b9b1aaa0dbb12ab9e2640037478aff995d9386a6"}
Sep 29 21:29:17 crc kubenswrapper[4911]: I0929 21:29:17.698459 4911 generic.go:334] "Generic (PLEG): container finished" podID="053ef77e-10da-46b9-bc85-77d52f64b576" containerID="957412a4b876274eab6314ccb249853469a0d09155e5b04c69c34f04ec5f7da3" exitCode=0
Sep 29 21:29:17 crc kubenswrapper[4911]: I0929 21:29:17.698523 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-59bsf" event={"ID":"053ef77e-10da-46b9-bc85-77d52f64b576","Type":"ContainerDied","Data":"957412a4b876274eab6314ccb249853469a0d09155e5b04c69c34f04ec5f7da3"}
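
The event={...} payloads in the PLEG records above are JSON with three fields; a small struct mirroring that shape (field names taken from the log, not imported from kubelet) is enough to decode them. The Died-then-Started pairs with exitCode=0 are consistent with these marketplace catalog pods running short content-extraction containers before the long-lived registry server starts.

package main

// Decode the event={"ID":...,"Type":...,"Data":...} payloads seen in the
// "SyncLoop (PLEG)" records above. This mirrors the JSON shape only; it is
// not kubelet's own PodLifecycleEvent type.

import (
	"encoding/json"
	"fmt"
)

type plegEvent struct {
	ID   string `json:"ID"`   // pod UID
	Type string `json:"Type"` // ContainerStarted, ContainerDied, ...
	Data string `json:"Data"` // container or sandbox ID for these event types
}

func main() {
	raw := `{"ID":"d2681ca4-c395-47c2-8145-a1da21e6f46f","Type":"ContainerDied","Data":"37a27cf2f5a8f007b746c7ec74e3c5b91c91f9a6f94e09e881b5e44452320ef1"}`
	var ev plegEvent
	if err := json.Unmarshal([]byte(raw), &ev); err != nil {
		panic(err)
	}
	fmt.Printf("pod %s: %s (container %.12s...)\n", ev.ID, ev.Type, ev.Data)
}
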
Sep 29 21:29:18 crc kubenswrapper[4911]: I0929 21:29:18.707036 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wxv47" event={"ID":"d2681ca4-c395-47c2-8145-a1da21e6f46f","Type":"ContainerStarted","Data":"993a5c643f2348e0f5d6e16fb3237c3645b36e1a0c53e9ed822040b7bd3430e7"}
Sep 29 21:29:18 crc kubenswrapper[4911]: I0929 21:29:18.711023 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-59bsf" event={"ID":"053ef77e-10da-46b9-bc85-77d52f64b576","Type":"ContainerStarted","Data":"e38d1b871c7a69a3e81016078cbf2ca4c5e68ac96ff88ec3a7b7796cffcfd02a"}
Sep 29 21:29:18 crc kubenswrapper[4911]: I0929 21:29:18.733195 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wxv47" podStartSLOduration=2.2375220909999998 podStartE2EDuration="4.733171372s" podCreationTimestamp="2025-09-29 21:29:14 +0000 UTC" firstStartedPulling="2025-09-29 21:29:15.66197611 +0000 UTC m=+233.639088781" lastFinishedPulling="2025-09-29 21:29:18.157625361 +0000 UTC m=+236.134738062" observedRunningTime="2025-09-29 21:29:18.729768744 +0000 UTC m=+236.706881425" watchObservedRunningTime="2025-09-29 21:29:18.733171372 +0000 UTC m=+236.710284043"
Sep 29 21:29:18 crc kubenswrapper[4911]: I0929 21:29:18.752770 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-59bsf" podStartSLOduration=2.33650674 podStartE2EDuration="4.752742865s" podCreationTimestamp="2025-09-29 21:29:14 +0000 UTC" firstStartedPulling="2025-09-29 21:29:15.666679909 +0000 UTC m=+233.643792590" lastFinishedPulling="2025-09-29 21:29:18.082916044 +0000 UTC m=+236.060028715" observedRunningTime="2025-09-29 21:29:18.750621417 +0000 UTC m=+236.727734088" watchObservedRunningTime="2025-09-29 21:29:18.752742865 +0000 UTC m=+236.729855536"
Sep 29 21:29:22 crc kubenswrapper[4911]: I0929 21:29:22.107098 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rm59v"
Sep 29 21:29:22 crc kubenswrapper[4911]: I0929 21:29:22.107752 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rm59v"
Sep 29 21:29:22 crc kubenswrapper[4911]: I0929 21:29:22.160100 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rm59v"
Sep 29 21:29:22 crc kubenswrapper[4911]: I0929 21:29:22.293261 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7578h"
Sep 29 21:29:22 crc kubenswrapper[4911]: I0929 21:29:22.293627 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7578h"
Sep 29 21:29:22 crc kubenswrapper[4911]: I0929 21:29:22.332311 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7578h"
Sep 29 21:29:22 crc kubenswrapper[4911]: I0929 21:29:22.781706 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7578h"
Sep 29 21:29:22 crc kubenswrapper[4911]: I0929 21:29:22.786440 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rm59v"
Sep 29 21:29:24 crc kubenswrapper[4911]: I0929 21:29:24.544610 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wxv47"
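
The probe sequence above is startup-probe gating: each catalog pod first reports probe="startup" status="unhealthy" (the container is up but has not yet passed its startup check), readiness stays empty, then startup flips to "started", and only after that does readiness report "ready". A toy model of that ordering (illustrative only, not kubelet's prober):

package main

// Toy model of the gating visible above: readiness results only take effect
// once the startup probe has passed. Illustrative only.

import "fmt"

type probeState struct{ startupPassed, ready bool }

func (p *probeState) observe(probe, status string) {
	switch probe {
	case "startup":
		p.startupPassed = status == "started"
	case "readiness":
		if p.startupPassed { // gated until startup succeeds
			p.ready = status == "ready"
		}
	}
	fmt.Printf("probe=%-9s status=%-11q => ready=%v\n", probe, status, p.ready)
}

func main() {
	var p probeState
	// The sequence logged for redhat-marketplace-rm59v above.
	p.observe("startup", "unhealthy")
	p.observe("readiness", "")
	p.observe("startup", "started")
	p.observe("readiness", "ready")
}
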
Sep 29 21:29:24 crc kubenswrapper[4911]: I0929 21:29:24.544685 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:24 crc kubenswrapper[4911]: I0929 21:29:24.586068 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:24 crc kubenswrapper[4911]: I0929 21:29:24.687393 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:24 crc kubenswrapper[4911]: I0929 21:29:24.687585 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:24 crc kubenswrapper[4911]: I0929 21:29:24.751359 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:24 crc kubenswrapper[4911]: I0929 21:29:24.812299 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wxv47"
Sep 29 21:29:25 crc kubenswrapper[4911]: I0929 21:29:25.809651 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-59bsf"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.188386 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" podUID="9a3253d3-c916-477b-82cd-7f7911bfc1b0" containerName="oauth-openshift" containerID="cri-o://4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1" gracePeriod=15
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.612604 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.653334 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-57569d6b9d-dzvc9"]
Sep 29 21:29:34 crc kubenswrapper[4911]: E0929 21:29:34.653670 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a3253d3-c916-477b-82cd-7f7911bfc1b0" containerName="oauth-openshift"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.653685 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a3253d3-c916-477b-82cd-7f7911bfc1b0" containerName="oauth-openshift"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.653824 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a3253d3-c916-477b-82cd-7f7911bfc1b0" containerName="oauth-openshift"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.654335 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.670446 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-57569d6b9d-dzvc9"]
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.804255 4911 generic.go:334] "Generic (PLEG): container finished" podID="9a3253d3-c916-477b-82cd-7f7911bfc1b0" containerID="4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1" exitCode=0
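
The 21:29:34 block is a rolling replacement in openshift-authentication: the old oauth-openshift-558db77b4-h9qcg container is killed with a 15-second grace period while the replacement pod oauth-openshift-57569d6b9d-dzvc9 is admitted (cpu, memory, and CPUSet state for the old UID is dropped first). The "container finished" event at 21:29:34.804255 shows it exited about 0.6s after the kill, well inside the grace period. A small sketch of the deadline arithmetic behind gracePeriod=15 (illustrative; the runtime escalates SIGTERM to SIGKILL at the deadline):

package main

// Deadline arithmetic behind "Killing container with a grace period":
// SIGTERM is sent at kill time; SIGKILL follows only if the container is
// still running when the grace period lapses. Illustrative only.

import (
	"fmt"
	"time"
)

func main() {
	killedAt, _ := time.Parse(time.TimeOnly, "21:29:34") // from the record above
	grace := 15 * time.Second                            // gracePeriod=15
	exitedAt, _ := time.Parse(time.TimeOnly, "21:29:35") // container finished ~0.6s later

	fmt.Println("SIGKILL deadline:", killedAt.Add(grace).Format(time.TimeOnly))
	fmt.Println("exited with", killedAt.Add(grace).Sub(exitedAt), "to spare")
}
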
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.804339 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.804352 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" event={"ID":"9a3253d3-c916-477b-82cd-7f7911bfc1b0","Type":"ContainerDied","Data":"4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1"}
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.804417 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-h9qcg" event={"ID":"9a3253d3-c916-477b-82cd-7f7911bfc1b0","Type":"ContainerDied","Data":"828df8fd7f7e521d4f1032ac6bcaf4d5d5939bd5d84671ca8dcd6d923cc4fe67"}
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.804464 4911 scope.go:117] "RemoveContainer" containerID="4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.806498 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-ocp-branding-template\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") "
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.806568 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-serving-cert\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") "
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.806651 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-error\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") "
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.808214 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-idp-0-file-data\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") "
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.808262 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-session\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") "
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.808304 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9a3253d3-c916-477b-82cd-7f7911bfc1b0-audit-dir\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") "
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.808373 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-provider-selection\") pod 
\"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.808535 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-service-ca\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.808615 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-trusted-ca-bundle\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.808690 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-login\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.808778 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-cliconfig\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.809093 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-router-certs\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.809177 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-audit-policies\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.809243 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbjl5\" (UniqueName: \"kubernetes.io/projected/9a3253d3-c916-477b-82cd-7f7911bfc1b0-kube-api-access-sbjl5\") pod \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\" (UID: \"9a3253d3-c916-477b-82cd-7f7911bfc1b0\") " Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.809589 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.809709 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-service-ca\") pod 
\"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.808585 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9a3253d3-c916-477b-82cd-7f7911bfc1b0-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.809789 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a41d9586-c53f-4fe9-8422-02cf9c1e0209-audit-policies\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.809481 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.809756 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.809921 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a41d9586-c53f-4fe9-8422-02cf9c1e0209-audit-dir\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.809960 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn6vz\" (UniqueName: \"kubernetes.io/projected/a41d9586-c53f-4fe9-8422-02cf9c1e0209-kube-api-access-sn6vz\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810024 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-session\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810091 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-serving-cert\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810151 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-cliconfig\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810237 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810308 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810341 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810424 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-user-template-login\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810429 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810480 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-router-certs\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810501 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-user-template-error\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810637 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810651 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810663 4911 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810679 4911 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9a3253d3-c916-477b-82cd-7f7911bfc1b0-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.810883 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod 
"9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.813345 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.814001 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.814380 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.815131 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.815592 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a3253d3-c916-477b-82cd-7f7911bfc1b0-kube-api-access-sbjl5" (OuterVolumeSpecName: "kube-api-access-sbjl5") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "kube-api-access-sbjl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.817216 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.817344 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.822935 4911 scope.go:117] "RemoveContainer" containerID="4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1"
Sep 29 21:29:34 crc kubenswrapper[4911]: E0929 21:29:34.824007 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1\": container with ID starting with 4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1 not found: ID does not exist" containerID="4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.824051 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1"} err="failed to get container status \"4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1\": rpc error: code = NotFound desc = could not find container \"4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1\": container with ID starting with 4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1 not found: ID does not exist"
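
The NotFound pair above is a benign race, not a failure: RemoveContainer runs after the runtime has already deleted 4170f3d0ab48..., so the follow-up ContainerStatus lookup returns rpc code NotFound and the deletion is treated as already done. A sketch of idempotent handling for a gRPC CRI-style call (illustrative; removeContainer below is a stand-in, not a real runtime client):

package main

// Treat NotFound from a CRI-style RPC as "already removed", the way the
// DeleteContainer path above does.

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func removeContainer(id string) error {
	// Simulate the second delete racing a completed first delete.
	return status.Errorf(codes.NotFound, "could not find container %q: ID does not exist", id)
}

func main() {
	id := "4170f3d0ab485bb97d6b890499a504c58a51a12fd94e01bb1f05bdd5ebe8e3b1"
	if err := removeContainer(id); err != nil {
		if status.Code(err) == codes.NotFound {
			fmt.Println("container already gone, nothing to do:", err)
			return
		}
		panic(err) // anything else is a real error
	}
}
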
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.827952 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.829272 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "9a3253d3-c916-477b-82cd-7f7911bfc1b0" (UID: "9a3253d3-c916-477b-82cd-7f7911bfc1b0"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.912941 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913337 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-user-template-login\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913415 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-router-certs\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913459 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-user-template-error\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913504 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913563 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-service-ca\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913627 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a41d9586-c53f-4fe9-8422-02cf9c1e0209-audit-policies\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9"
Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913677 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn6vz\" (UniqueName: \"kubernetes.io/projected/a41d9586-c53f-4fe9-8422-02cf9c1e0209-kube-api-access-sn6vz\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " 
pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913713 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a41d9586-c53f-4fe9-8422-02cf9c1e0209-audit-dir\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913759 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-session\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913838 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-serving-cert\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913881 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-cliconfig\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913921 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.913961 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.914095 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.914145 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.914170 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.914195 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbjl5\" (UniqueName: \"kubernetes.io/projected/9a3253d3-c916-477b-82cd-7f7911bfc1b0-kube-api-access-sbjl5\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.914220 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.914254 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.914277 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.914298 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.914318 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.914340 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9a3253d3-c916-477b-82cd-7f7911bfc1b0-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.914762 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a41d9586-c53f-4fe9-8422-02cf9c1e0209-audit-dir\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.915374 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-service-ca\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.915498 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-cliconfig\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 
21:29:34.915917 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a41d9586-c53f-4fe9-8422-02cf9c1e0209-audit-policies\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.917578 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-user-template-login\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.917615 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-router-certs\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.917877 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-user-template-error\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.918233 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.918401 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.918511 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.919127 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.919366 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-session\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.921661 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a41d9586-c53f-4fe9-8422-02cf9c1e0209-v4-0-config-system-serving-cert\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.930319 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn6vz\" (UniqueName: \"kubernetes.io/projected/a41d9586-c53f-4fe9-8422-02cf9c1e0209-kube-api-access-sn6vz\") pod \"oauth-openshift-57569d6b9d-dzvc9\" (UID: \"a41d9586-c53f-4fe9-8422-02cf9c1e0209\") " pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:34 crc kubenswrapper[4911]: I0929 21:29:34.969887 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:35 crc kubenswrapper[4911]: I0929 21:29:35.150557 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h9qcg"] Sep 29 21:29:35 crc kubenswrapper[4911]: I0929 21:29:35.159971 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h9qcg"] Sep 29 21:29:35 crc kubenswrapper[4911]: I0929 21:29:35.415058 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-57569d6b9d-dzvc9"] Sep 29 21:29:35 crc kubenswrapper[4911]: W0929 21:29:35.430244 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda41d9586_c53f_4fe9_8422_02cf9c1e0209.slice/crio-b72034a2619c8de666d6831b5f0beacd678cd3f4d73005420ae2ab24e706388e WatchSource:0}: Error finding container b72034a2619c8de666d6831b5f0beacd678cd3f4d73005420ae2ab24e706388e: Status 404 returned error can't find the container with id b72034a2619c8de666d6831b5f0beacd678cd3f4d73005420ae2ab24e706388e Sep 29 21:29:35 crc kubenswrapper[4911]: I0929 21:29:35.812566 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" event={"ID":"a41d9586-c53f-4fe9-8422-02cf9c1e0209","Type":"ContainerStarted","Data":"fb48ed82a20a5ac3fa7ccc7a70a5a969daa7acb2f65288e1ad0a803c297576a1"} Sep 29 21:29:35 crc kubenswrapper[4911]: I0929 21:29:35.812611 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" event={"ID":"a41d9586-c53f-4fe9-8422-02cf9c1e0209","Type":"ContainerStarted","Data":"b72034a2619c8de666d6831b5f0beacd678cd3f4d73005420ae2ab24e706388e"} Sep 29 21:29:35 crc kubenswrapper[4911]: I0929 21:29:35.813265 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:35 crc kubenswrapper[4911]: I0929 21:29:35.837024 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" podStartSLOduration=26.836995274 podStartE2EDuration="26.836995274s" podCreationTimestamp="2025-09-29 21:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:29:35.833086499 +0000 UTC m=+253.810199170" watchObservedRunningTime="2025-09-29 21:29:35.836995274 +0000 UTC m=+253.814107945" Sep 29 21:29:36 crc kubenswrapper[4911]: I0929 21:29:36.063617 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-57569d6b9d-dzvc9" Sep 29 21:29:36 crc kubenswrapper[4911]: I0929 21:29:36.708431 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a3253d3-c916-477b-82cd-7f7911bfc1b0" path="/var/lib/kubelet/pods/9a3253d3-c916-477b-82cd-7f7911bfc1b0/volumes" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.147038 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v"] Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.148995 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.154149 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.154149 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.162510 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v"] Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.220012 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6dv8\" (UniqueName: \"kubernetes.io/projected/9a3bdae8-c904-4591-bb7d-0e00a24975fb-kube-api-access-m6dv8\") pod \"collect-profiles-29319690-42m2v\" (UID: \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.220076 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9a3bdae8-c904-4591-bb7d-0e00a24975fb-secret-volume\") pod \"collect-profiles-29319690-42m2v\" (UID: \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.220158 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9a3bdae8-c904-4591-bb7d-0e00a24975fb-config-volume\") pod \"collect-profiles-29319690-42m2v\" (UID: \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.322152 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9a3bdae8-c904-4591-bb7d-0e00a24975fb-config-volume\") pod \"collect-profiles-29319690-42m2v\" (UID: 
\"9a3bdae8-c904-4591-bb7d-0e00a24975fb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.322313 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6dv8\" (UniqueName: \"kubernetes.io/projected/9a3bdae8-c904-4591-bb7d-0e00a24975fb-kube-api-access-m6dv8\") pod \"collect-profiles-29319690-42m2v\" (UID: \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.322417 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9a3bdae8-c904-4591-bb7d-0e00a24975fb-secret-volume\") pod \"collect-profiles-29319690-42m2v\" (UID: \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.324886 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9a3bdae8-c904-4591-bb7d-0e00a24975fb-config-volume\") pod \"collect-profiles-29319690-42m2v\" (UID: \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.334089 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9a3bdae8-c904-4591-bb7d-0e00a24975fb-secret-volume\") pod \"collect-profiles-29319690-42m2v\" (UID: \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.343855 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6dv8\" (UniqueName: \"kubernetes.io/projected/9a3bdae8-c904-4591-bb7d-0e00a24975fb-kube-api-access-m6dv8\") pod \"collect-profiles-29319690-42m2v\" (UID: \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.470011 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.912173 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v"] Sep 29 21:30:00 crc kubenswrapper[4911]: I0929 21:30:00.994314 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" event={"ID":"9a3bdae8-c904-4591-bb7d-0e00a24975fb","Type":"ContainerStarted","Data":"5a5904d232c4b0eb84442f600599fdd51ba1371d31cc88f335cb3fd1db844e5e"} Sep 29 21:30:02 crc kubenswrapper[4911]: I0929 21:30:02.004350 4911 generic.go:334] "Generic (PLEG): container finished" podID="9a3bdae8-c904-4591-bb7d-0e00a24975fb" containerID="dd07c683178b49310e59bb68eff005164f906e6bdaa0d135377fa59e6c19f44a" exitCode=0 Sep 29 21:30:02 crc kubenswrapper[4911]: I0929 21:30:02.004442 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" event={"ID":"9a3bdae8-c904-4591-bb7d-0e00a24975fb","Type":"ContainerDied","Data":"dd07c683178b49310e59bb68eff005164f906e6bdaa0d135377fa59e6c19f44a"} Sep 29 21:30:03 crc kubenswrapper[4911]: I0929 21:30:03.223955 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:03 crc kubenswrapper[4911]: I0929 21:30:03.265945 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9a3bdae8-c904-4591-bb7d-0e00a24975fb-secret-volume\") pod \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\" (UID: \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\") " Sep 29 21:30:03 crc kubenswrapper[4911]: I0929 21:30:03.276027 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a3bdae8-c904-4591-bb7d-0e00a24975fb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9a3bdae8-c904-4591-bb7d-0e00a24975fb" (UID: "9a3bdae8-c904-4591-bb7d-0e00a24975fb"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:30:03 crc kubenswrapper[4911]: I0929 21:30:03.367159 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6dv8\" (UniqueName: \"kubernetes.io/projected/9a3bdae8-c904-4591-bb7d-0e00a24975fb-kube-api-access-m6dv8\") pod \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\" (UID: \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\") " Sep 29 21:30:03 crc kubenswrapper[4911]: I0929 21:30:03.368059 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9a3bdae8-c904-4591-bb7d-0e00a24975fb-config-volume\") pod \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\" (UID: \"9a3bdae8-c904-4591-bb7d-0e00a24975fb\") " Sep 29 21:30:03 crc kubenswrapper[4911]: I0929 21:30:03.368469 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9a3bdae8-c904-4591-bb7d-0e00a24975fb-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 21:30:03 crc kubenswrapper[4911]: I0929 21:30:03.369019 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a3bdae8-c904-4591-bb7d-0e00a24975fb-config-volume" (OuterVolumeSpecName: "config-volume") pod "9a3bdae8-c904-4591-bb7d-0e00a24975fb" (UID: "9a3bdae8-c904-4591-bb7d-0e00a24975fb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:30:03 crc kubenswrapper[4911]: I0929 21:30:03.371827 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a3bdae8-c904-4591-bb7d-0e00a24975fb-kube-api-access-m6dv8" (OuterVolumeSpecName: "kube-api-access-m6dv8") pod "9a3bdae8-c904-4591-bb7d-0e00a24975fb" (UID: "9a3bdae8-c904-4591-bb7d-0e00a24975fb"). InnerVolumeSpecName "kube-api-access-m6dv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:30:03 crc kubenswrapper[4911]: I0929 21:30:03.469430 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6dv8\" (UniqueName: \"kubernetes.io/projected/9a3bdae8-c904-4591-bb7d-0e00a24975fb-kube-api-access-m6dv8\") on node \"crc\" DevicePath \"\"" Sep 29 21:30:03 crc kubenswrapper[4911]: I0929 21:30:03.469462 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9a3bdae8-c904-4591-bb7d-0e00a24975fb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 21:30:04 crc kubenswrapper[4911]: I0929 21:30:04.022847 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" event={"ID":"9a3bdae8-c904-4591-bb7d-0e00a24975fb","Type":"ContainerDied","Data":"5a5904d232c4b0eb84442f600599fdd51ba1371d31cc88f335cb3fd1db844e5e"} Sep 29 21:30:04 crc kubenswrapper[4911]: I0929 21:30:04.022945 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a5904d232c4b0eb84442f600599fdd51ba1371d31cc88f335cb3fd1db844e5e" Sep 29 21:30:04 crc kubenswrapper[4911]: I0929 21:30:04.022887 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v" Sep 29 21:30:55 crc kubenswrapper[4911]: I0929 21:30:55.211037 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:30:55 crc kubenswrapper[4911]: I0929 21:30:55.211872 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:31:25 crc kubenswrapper[4911]: I0929 21:31:25.211615 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:31:25 crc kubenswrapper[4911]: I0929 21:31:25.212840 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:31:55 crc kubenswrapper[4911]: I0929 21:31:55.211761 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:31:55 crc kubenswrapper[4911]: I0929 21:31:55.212769 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:31:55 crc kubenswrapper[4911]: I0929 21:31:55.212882 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:31:55 crc kubenswrapper[4911]: I0929 21:31:55.213830 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e38c1fe31b443189675002028c28967c8680009a80ff20447fa8074033d6557e"} pod="openshift-machine-config-operator/machine-config-daemon-w647f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 21:31:55 crc kubenswrapper[4911]: I0929 21:31:55.213936 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" containerID="cri-o://e38c1fe31b443189675002028c28967c8680009a80ff20447fa8074033d6557e" gracePeriod=600 Sep 29 21:31:55 crc kubenswrapper[4911]: I0929 21:31:55.885844 4911 generic.go:334] "Generic (PLEG): container finished" podID="50640abc-40db-4390-82d1-f3cfc76da71c" 
containerID="e38c1fe31b443189675002028c28967c8680009a80ff20447fa8074033d6557e" exitCode=0 Sep 29 21:31:55 crc kubenswrapper[4911]: I0929 21:31:55.885847 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerDied","Data":"e38c1fe31b443189675002028c28967c8680009a80ff20447fa8074033d6557e"} Sep 29 21:31:55 crc kubenswrapper[4911]: I0929 21:31:55.886568 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"169a8ea1d84bd44d55b71bca47978a16d348f46e726c92490e98ec486b65a803"} Sep 29 21:31:55 crc kubenswrapper[4911]: I0929 21:31:55.886684 4911 scope.go:117] "RemoveContainer" containerID="568d099d7beea914cf9f9eb703e36dd179359d3f4406bae454334e39b0de79ca" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.461947 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-2nzpg"] Sep 29 21:33:04 crc kubenswrapper[4911]: E0929 21:33:04.462768 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a3bdae8-c904-4591-bb7d-0e00a24975fb" containerName="collect-profiles" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.462785 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a3bdae8-c904-4591-bb7d-0e00a24975fb" containerName="collect-profiles" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.462923 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a3bdae8-c904-4591-bb7d-0e00a24975fb" containerName="collect-profiles" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.463422 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.523152 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-2nzpg"] Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.607250 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-562jr\" (UniqueName: \"kubernetes.io/projected/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-kube-api-access-562jr\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.607355 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.607508 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.607572 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-bound-sa-token\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.607659 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-registry-certificates\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.607708 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-trusted-ca\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.607739 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-registry-tls\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.607848 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.636834 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.709032 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-562jr\" (UniqueName: \"kubernetes.io/projected/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-kube-api-access-562jr\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.709336 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.709380 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-bound-sa-token\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.709406 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-registry-certificates\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.709436 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-trusted-ca\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.709456 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-registry-tls\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.709482 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.710864 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.711957 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-registry-certificates\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.712199 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-trusted-ca\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.722760 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-registry-tls\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.725989 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.732352 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-562jr\" (UniqueName: \"kubernetes.io/projected/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-kube-api-access-562jr\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.738029 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0-bound-sa-token\") pod \"image-registry-66df7c8f76-2nzpg\" (UID: \"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0\") " pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:04 crc kubenswrapper[4911]: I0929 21:33:04.781758 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:05 crc kubenswrapper[4911]: I0929 21:33:05.304754 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-2nzpg"] Sep 29 21:33:05 crc kubenswrapper[4911]: I0929 21:33:05.407510 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" event={"ID":"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0","Type":"ContainerStarted","Data":"092b99db9cbba26e1631081953bb9163b60d04d6cca272cc2c8aaad5b3de9856"} Sep 29 21:33:06 crc kubenswrapper[4911]: I0929 21:33:06.418621 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" event={"ID":"6aad87ea-f6b1-49ad-b9f1-0d5d39254ba0","Type":"ContainerStarted","Data":"ee4cfb3fc78c3e75616da6f7c220c1ea905c3b394e970d8b722f83e2db56d40b"} Sep 29 21:33:06 crc kubenswrapper[4911]: I0929 21:33:06.418831 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:06 crc kubenswrapper[4911]: I0929 21:33:06.455725 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" podStartSLOduration=2.455687404 podStartE2EDuration="2.455687404s" podCreationTimestamp="2025-09-29 21:33:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:33:06.448339463 +0000 UTC m=+464.425452174" watchObservedRunningTime="2025-09-29 21:33:06.455687404 +0000 UTC m=+464.432800155" Sep 29 21:33:24 crc kubenswrapper[4911]: I0929 21:33:24.797379 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-2nzpg" Sep 29 21:33:24 crc kubenswrapper[4911]: I0929 21:33:24.879908 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8djg4"] Sep 29 21:33:49 crc kubenswrapper[4911]: I0929 21:33:49.927653 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" podUID="4357f10f-dad3-4233-9d03-1cad6319e4a9" containerName="registry" containerID="cri-o://779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb" gracePeriod=30 Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.388938 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.430717 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46w52\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-kube-api-access-46w52\") pod \"4357f10f-dad3-4233-9d03-1cad6319e4a9\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.432100 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-bound-sa-token\") pod \"4357f10f-dad3-4233-9d03-1cad6319e4a9\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.432168 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4357f10f-dad3-4233-9d03-1cad6319e4a9-registry-certificates\") pod \"4357f10f-dad3-4233-9d03-1cad6319e4a9\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.432416 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"4357f10f-dad3-4233-9d03-1cad6319e4a9\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.432509 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4357f10f-dad3-4233-9d03-1cad6319e4a9-trusted-ca\") pod \"4357f10f-dad3-4233-9d03-1cad6319e4a9\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.432584 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-registry-tls\") pod \"4357f10f-dad3-4233-9d03-1cad6319e4a9\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.432654 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4357f10f-dad3-4233-9d03-1cad6319e4a9-ca-trust-extracted\") pod \"4357f10f-dad3-4233-9d03-1cad6319e4a9\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.432714 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4357f10f-dad3-4233-9d03-1cad6319e4a9-installation-pull-secrets\") pod \"4357f10f-dad3-4233-9d03-1cad6319e4a9\" (UID: \"4357f10f-dad3-4233-9d03-1cad6319e4a9\") " Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.433765 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4357f10f-dad3-4233-9d03-1cad6319e4a9-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "4357f10f-dad3-4233-9d03-1cad6319e4a9" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.434655 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4357f10f-dad3-4233-9d03-1cad6319e4a9-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "4357f10f-dad3-4233-9d03-1cad6319e4a9" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.440966 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "4357f10f-dad3-4233-9d03-1cad6319e4a9" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.441941 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-kube-api-access-46w52" (OuterVolumeSpecName: "kube-api-access-46w52") pod "4357f10f-dad3-4233-9d03-1cad6319e4a9" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9"). InnerVolumeSpecName "kube-api-access-46w52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.442572 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "4357f10f-dad3-4233-9d03-1cad6319e4a9" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.443164 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4357f10f-dad3-4233-9d03-1cad6319e4a9-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "4357f10f-dad3-4233-9d03-1cad6319e4a9" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.447195 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "4357f10f-dad3-4233-9d03-1cad6319e4a9" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.468120 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4357f10f-dad3-4233-9d03-1cad6319e4a9-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "4357f10f-dad3-4233-9d03-1cad6319e4a9" (UID: "4357f10f-dad3-4233-9d03-1cad6319e4a9"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.534373 4911 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.534436 4911 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4357f10f-dad3-4233-9d03-1cad6319e4a9-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.534462 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4357f10f-dad3-4233-9d03-1cad6319e4a9-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.534480 4911 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.534500 4911 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4357f10f-dad3-4233-9d03-1cad6319e4a9-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.534518 4911 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4357f10f-dad3-4233-9d03-1cad6319e4a9-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.534536 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46w52\" (UniqueName: \"kubernetes.io/projected/4357f10f-dad3-4233-9d03-1cad6319e4a9-kube-api-access-46w52\") on node \"crc\" DevicePath \"\"" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.744272 4911 generic.go:334] "Generic (PLEG): container finished" podID="4357f10f-dad3-4233-9d03-1cad6319e4a9" containerID="779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb" exitCode=0 Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.744347 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" event={"ID":"4357f10f-dad3-4233-9d03-1cad6319e4a9","Type":"ContainerDied","Data":"779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb"} Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.744618 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" event={"ID":"4357f10f-dad3-4233-9d03-1cad6319e4a9","Type":"ContainerDied","Data":"581ca7861123268b62b37013105aecb329f541e02bdb0476461070e874dff07b"} Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.744373 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8djg4" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.744720 4911 scope.go:117] "RemoveContainer" containerID="779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.772113 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8djg4"] Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.789316 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8djg4"] Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.795005 4911 scope.go:117] "RemoveContainer" containerID="779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb" Sep 29 21:33:50 crc kubenswrapper[4911]: E0929 21:33:50.795951 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb\": container with ID starting with 779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb not found: ID does not exist" containerID="779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb" Sep 29 21:33:50 crc kubenswrapper[4911]: I0929 21:33:50.796327 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb"} err="failed to get container status \"779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb\": rpc error: code = NotFound desc = could not find container \"779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb\": container with ID starting with 779d470e2b33d9d003084c46afb86c56f65c25a8d4c919970e47fa4313dae7fb not found: ID does not exist" Sep 29 21:33:52 crc kubenswrapper[4911]: I0929 21:33:52.713252 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4357f10f-dad3-4233-9d03-1cad6319e4a9" path="/var/lib/kubelet/pods/4357f10f-dad3-4233-9d03-1cad6319e4a9/volumes" Sep 29 21:33:55 crc kubenswrapper[4911]: I0929 21:33:55.212033 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:33:55 crc kubenswrapper[4911]: I0929 21:33:55.212205 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.759393 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-vlpxx"] Sep 29 21:34:17 crc kubenswrapper[4911]: E0929 21:34:17.760236 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4357f10f-dad3-4233-9d03-1cad6319e4a9" containerName="registry" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.760251 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4357f10f-dad3-4233-9d03-1cad6319e4a9" containerName="registry" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.760361 4911 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="4357f10f-dad3-4233-9d03-1cad6319e4a9" containerName="registry" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.760817 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-vlpxx" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.761708 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-7sbqq"] Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.762497 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-7sbqq" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.771214 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.771629 4911 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-lh79t" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.771870 4911 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-g89nk" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.772063 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.779980 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-sglg6"] Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.780960 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-sglg6" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.782621 4911 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-5q6vb" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.786073 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-vlpxx"] Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.801442 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-7sbqq"] Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.807284 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-sglg6"] Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.945997 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkj5c\" (UniqueName: \"kubernetes.io/projected/ee5b7990-71bd-4b37-8fa1-aaa3b1284320-kube-api-access-xkj5c\") pod \"cert-manager-webhook-5655c58dd6-sglg6\" (UID: \"ee5b7990-71bd-4b37-8fa1-aaa3b1284320\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-sglg6" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.946091 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt6fd\" (UniqueName: \"kubernetes.io/projected/e997c16b-21df-4c5b-89e3-f45fec29191e-kube-api-access-wt6fd\") pod \"cert-manager-5b446d88c5-7sbqq\" (UID: \"e997c16b-21df-4c5b-89e3-f45fec29191e\") " pod="cert-manager/cert-manager-5b446d88c5-7sbqq" Sep 29 21:34:17 crc kubenswrapper[4911]: I0929 21:34:17.946136 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgj5p\" (UniqueName: 
\"kubernetes.io/projected/77a3df58-e35e-45ad-a5bc-6fb3841ec955-kube-api-access-tgj5p\") pod \"cert-manager-cainjector-7f985d654d-vlpxx\" (UID: \"77a3df58-e35e-45ad-a5bc-6fb3841ec955\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-vlpxx" Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.047990 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkj5c\" (UniqueName: \"kubernetes.io/projected/ee5b7990-71bd-4b37-8fa1-aaa3b1284320-kube-api-access-xkj5c\") pod \"cert-manager-webhook-5655c58dd6-sglg6\" (UID: \"ee5b7990-71bd-4b37-8fa1-aaa3b1284320\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-sglg6" Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.048114 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt6fd\" (UniqueName: \"kubernetes.io/projected/e997c16b-21df-4c5b-89e3-f45fec29191e-kube-api-access-wt6fd\") pod \"cert-manager-5b446d88c5-7sbqq\" (UID: \"e997c16b-21df-4c5b-89e3-f45fec29191e\") " pod="cert-manager/cert-manager-5b446d88c5-7sbqq" Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.048154 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgj5p\" (UniqueName: \"kubernetes.io/projected/77a3df58-e35e-45ad-a5bc-6fb3841ec955-kube-api-access-tgj5p\") pod \"cert-manager-cainjector-7f985d654d-vlpxx\" (UID: \"77a3df58-e35e-45ad-a5bc-6fb3841ec955\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-vlpxx" Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.067975 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkj5c\" (UniqueName: \"kubernetes.io/projected/ee5b7990-71bd-4b37-8fa1-aaa3b1284320-kube-api-access-xkj5c\") pod \"cert-manager-webhook-5655c58dd6-sglg6\" (UID: \"ee5b7990-71bd-4b37-8fa1-aaa3b1284320\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-sglg6" Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.073571 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgj5p\" (UniqueName: \"kubernetes.io/projected/77a3df58-e35e-45ad-a5bc-6fb3841ec955-kube-api-access-tgj5p\") pod \"cert-manager-cainjector-7f985d654d-vlpxx\" (UID: \"77a3df58-e35e-45ad-a5bc-6fb3841ec955\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-vlpxx" Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.074291 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt6fd\" (UniqueName: \"kubernetes.io/projected/e997c16b-21df-4c5b-89e3-f45fec29191e-kube-api-access-wt6fd\") pod \"cert-manager-5b446d88c5-7sbqq\" (UID: \"e997c16b-21df-4c5b-89e3-f45fec29191e\") " pod="cert-manager/cert-manager-5b446d88c5-7sbqq" Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.080987 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-vlpxx" Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.090909 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-7sbqq" Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.102241 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-sglg6" Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.426211 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-sglg6"] Sep 29 21:34:18 crc kubenswrapper[4911]: W0929 21:34:18.434467 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee5b7990_71bd_4b37_8fa1_aaa3b1284320.slice/crio-ea2179cd8944db15b4b107d1b9a256086c4ab8b976bf09f0c03565a49a85d94a WatchSource:0}: Error finding container ea2179cd8944db15b4b107d1b9a256086c4ab8b976bf09f0c03565a49a85d94a: Status 404 returned error can't find the container with id ea2179cd8944db15b4b107d1b9a256086c4ab8b976bf09f0c03565a49a85d94a Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.437960 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.559241 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-7sbqq"] Sep 29 21:34:18 crc kubenswrapper[4911]: W0929 21:34:18.565869 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode997c16b_21df_4c5b_89e3_f45fec29191e.slice/crio-397ca98e0e4d3cfb5ff23e95a6b74119e119ef5b44cf40e0fa9618e10ad630f6 WatchSource:0}: Error finding container 397ca98e0e4d3cfb5ff23e95a6b74119e119ef5b44cf40e0fa9618e10ad630f6: Status 404 returned error can't find the container with id 397ca98e0e4d3cfb5ff23e95a6b74119e119ef5b44cf40e0fa9618e10ad630f6 Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.574838 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-vlpxx"] Sep 29 21:34:18 crc kubenswrapper[4911]: W0929 21:34:18.581266 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77a3df58_e35e_45ad_a5bc_6fb3841ec955.slice/crio-23a45e29f8242c88f243f7942f294bc3a0debe7c66a41b5e4c098bcb74d298be WatchSource:0}: Error finding container 23a45e29f8242c88f243f7942f294bc3a0debe7c66a41b5e4c098bcb74d298be: Status 404 returned error can't find the container with id 23a45e29f8242c88f243f7942f294bc3a0debe7c66a41b5e4c098bcb74d298be Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.953949 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-7sbqq" event={"ID":"e997c16b-21df-4c5b-89e3-f45fec29191e","Type":"ContainerStarted","Data":"397ca98e0e4d3cfb5ff23e95a6b74119e119ef5b44cf40e0fa9618e10ad630f6"} Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.955724 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-vlpxx" event={"ID":"77a3df58-e35e-45ad-a5bc-6fb3841ec955","Type":"ContainerStarted","Data":"23a45e29f8242c88f243f7942f294bc3a0debe7c66a41b5e4c098bcb74d298be"} Sep 29 21:34:18 crc kubenswrapper[4911]: I0929 21:34:18.957604 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-sglg6" event={"ID":"ee5b7990-71bd-4b37-8fa1-aaa3b1284320","Type":"ContainerStarted","Data":"ea2179cd8944db15b4b107d1b9a256086c4ab8b976bf09f0c03565a49a85d94a"} Sep 29 21:34:20 crc kubenswrapper[4911]: I0929 21:34:20.972998 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-sglg6" 
event={"ID":"ee5b7990-71bd-4b37-8fa1-aaa3b1284320","Type":"ContainerStarted","Data":"1c4215b53997c44760fc73477c5f78ae9d5c28a4cf59c455dcd71dc5847c49fb"} Sep 29 21:34:20 crc kubenswrapper[4911]: I0929 21:34:20.973644 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-sglg6" Sep 29 21:34:22 crc kubenswrapper[4911]: I0929 21:34:22.726539 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-sglg6" podStartSLOduration=3.405407834 podStartE2EDuration="5.726516206s" podCreationTimestamp="2025-09-29 21:34:17 +0000 UTC" firstStartedPulling="2025-09-29 21:34:18.43766596 +0000 UTC m=+536.414778631" lastFinishedPulling="2025-09-29 21:34:20.758774322 +0000 UTC m=+538.735887003" observedRunningTime="2025-09-29 21:34:20.994173625 +0000 UTC m=+538.971286286" watchObservedRunningTime="2025-09-29 21:34:22.726516206 +0000 UTC m=+540.703628877" Sep 29 21:34:22 crc kubenswrapper[4911]: I0929 21:34:22.991191 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-vlpxx" event={"ID":"77a3df58-e35e-45ad-a5bc-6fb3841ec955","Type":"ContainerStarted","Data":"16e95242d0b6a1f60f1ff028373b206366fd2a4451bae063d2cba3bd34a22099"} Sep 29 21:34:22 crc kubenswrapper[4911]: I0929 21:34:22.995289 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-7sbqq" event={"ID":"e997c16b-21df-4c5b-89e3-f45fec29191e","Type":"ContainerStarted","Data":"393f1447b040147cc6f2948f246447148487438693a2fd9fd48bd77c43e1abf6"} Sep 29 21:34:23 crc kubenswrapper[4911]: I0929 21:34:23.013540 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-vlpxx" podStartSLOduration=2.418076148 podStartE2EDuration="6.013510415s" podCreationTimestamp="2025-09-29 21:34:17 +0000 UTC" firstStartedPulling="2025-09-29 21:34:18.584408999 +0000 UTC m=+536.561521670" lastFinishedPulling="2025-09-29 21:34:22.179843266 +0000 UTC m=+540.156955937" observedRunningTime="2025-09-29 21:34:23.012551354 +0000 UTC m=+540.989664035" watchObservedRunningTime="2025-09-29 21:34:23.013510415 +0000 UTC m=+540.990623116" Sep 29 21:34:25 crc kubenswrapper[4911]: I0929 21:34:25.211528 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:34:25 crc kubenswrapper[4911]: I0929 21:34:25.212141 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.108180 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-sglg6" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.135715 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-7sbqq" podStartSLOduration=7.528641935 podStartE2EDuration="11.135695286s" podCreationTimestamp="2025-09-29 21:34:17 +0000 UTC" firstStartedPulling="2025-09-29 
21:34:18.571462763 +0000 UTC m=+536.548575434" lastFinishedPulling="2025-09-29 21:34:22.178516114 +0000 UTC m=+540.155628785" observedRunningTime="2025-09-29 21:34:23.04141111 +0000 UTC m=+541.018523791" watchObservedRunningTime="2025-09-29 21:34:28.135695286 +0000 UTC m=+546.112807967" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.430413 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9wxd8"] Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.431405 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovn-controller" containerID="cri-o://c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d" gracePeriod=30 Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.431470 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="nbdb" containerID="cri-o://db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87" gracePeriod=30 Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.431572 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovn-acl-logging" containerID="cri-o://c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0" gracePeriod=30 Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.431536 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="kube-rbac-proxy-node" containerID="cri-o://3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b" gracePeriod=30 Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.431608 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="northd" containerID="cri-o://e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d" gracePeriod=30 Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.431513 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa" gracePeriod=30 Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.431772 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="sbdb" containerID="cri-o://68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3" gracePeriod=30 Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.488550 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" containerID="cri-o://aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d" gracePeriod=30 Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.780636 4911 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/3.log" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.783221 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovn-acl-logging/0.log" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.784069 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovn-controller/0.log" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.784549 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844443 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-zl5lq"] Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.844722 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844738 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.844747 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovn-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844754 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovn-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.844762 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="kube-rbac-proxy-node" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844769 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="kube-rbac-proxy-node" Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.844781 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844800 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.844808 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovn-acl-logging" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844814 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovn-acl-logging" Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.844824 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844831 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.844842 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" 
containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844848 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.844855 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="nbdb" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844862 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="nbdb" Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.844868 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="kubecfg-setup" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844877 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="kubecfg-setup" Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.844885 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844892 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.844902 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="northd" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844909 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="northd" Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.844920 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="sbdb" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.844927 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="sbdb" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845039 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845050 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845062 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovn-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845070 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovn-acl-logging" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845078 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="nbdb" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845087 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="sbdb" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845095 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" 
containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845103 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845110 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="northd" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845119 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="kube-rbac-proxy-node" Sep 29 21:34:28 crc kubenswrapper[4911]: E0929 21:34:28.845235 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845242 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845364 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.845375 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerName="ovnkube-controller" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.847547 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.929955 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-log-socket\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.930268 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-systemd\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.930371 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-ovn\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.930472 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovn-node-metrics-cert\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.930558 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-run-netns\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.930661 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovnkube-script-lib\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.930770 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-node-log\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.930901 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-slash\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.930997 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-etc-openvswitch\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.931100 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-run-ovn-kubernetes\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.931186 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-cni-netd\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.931279 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.931364 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-kubelet\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.931461 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovnkube-config\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.931660 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-openvswitch\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.931772 4911 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-env-overrides\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.931904 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-systemd-units\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932007 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnzb7\" (UniqueName: \"kubernetes.io/projected/4e3aa70f-b0da-44c9-a850-96d4494b02fc-kube-api-access-rnzb7\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.931926 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.931969 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.931989 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932019 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932041 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932079 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-log-socket" (OuterVolumeSpecName: "log-socket") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932066 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932116 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932187 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932116 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-cni-bin\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932356 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-var-lib-openvswitch\") pod \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\" (UID: \"4e3aa70f-b0da-44c9-a850-96d4494b02fc\") " Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932515 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932535 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932579 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-slash" (OuterVolumeSpecName: "host-slash") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932589 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-node-log" (OuterVolumeSpecName: "node-log") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932623 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932635 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932651 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.932893 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933027 4911 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933065 4911 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933087 4911 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933109 4911 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933129 4911 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933148 4911 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933165 4911 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933183 4911 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933199 4911 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933215 4911 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933232 4911 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-log-socket\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933250 4911 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933267 4911 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933284 4911 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933301 4911 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-node-log\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.933317 4911 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-slash\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.936464 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.936501 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e3aa70f-b0da-44c9-a850-96d4494b02fc-kube-api-access-rnzb7" (OuterVolumeSpecName: "kube-api-access-rnzb7") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "kube-api-access-rnzb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:34:28 crc kubenswrapper[4911]: I0929 21:34:28.943928 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "4e3aa70f-b0da-44c9-a850-96d4494b02fc" (UID: "4e3aa70f-b0da-44c9-a850-96d4494b02fc"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.034560 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-run-systemd\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.034718 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-kubelet\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.034836 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-run-netns\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.034947 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.035011 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-cni-bin\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.035069 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-run-openvswitch\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.035127 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd5wf\" (UniqueName: \"kubernetes.io/projected/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-kube-api-access-pd5wf\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.035306 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-ovn-node-metrics-cert\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.035351 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-ovnkube-script-lib\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.035377 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-var-lib-openvswitch\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.035540 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-run-ovn-kubernetes\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.035709 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-slash\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.035898 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-cni-netd\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.036045 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-systemd-units\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.036142 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-ovnkube-config\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.036238 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-run-ovn\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.036331 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-node-log\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.036458 4911 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-etc-openvswitch\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.036575 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-log-socket\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.036738 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-env-overrides\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.037052 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnzb7\" (UniqueName: \"kubernetes.io/projected/4e3aa70f-b0da-44c9-a850-96d4494b02fc-kube-api-access-rnzb7\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.037133 4911 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.037696 4911 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4e3aa70f-b0da-44c9-a850-96d4494b02fc-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.037761 4911 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4e3aa70f-b0da-44c9-a850-96d4494b02fc-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.038917 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovnkube-controller/3.log" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.041264 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovn-acl-logging/0.log" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.041644 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9wxd8_4e3aa70f-b0da-44c9-a850-96d4494b02fc/ovn-controller/0.log" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.041909 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d" exitCode=0 Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.041933 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3" exitCode=0 Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.041940 4911 generic.go:334] "Generic (PLEG): container finished" 
podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87" exitCode=0 Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.041947 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d" exitCode=0 Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.041953 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa" exitCode=0 Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.041961 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b" exitCode=0 Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.041969 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0" exitCode=143 Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.041977 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" containerID="c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d" exitCode=143 Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042032 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042100 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042143 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042218 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042256 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042287 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042313 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042342 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042371 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042389 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042404 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042419 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042433 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042448 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042462 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042477 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042502 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042527 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042548 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042562 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042577 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042590 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042605 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042643 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042657 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042673 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042687 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042707 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d"} Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042733 4911 
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042751 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042766 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042780 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042826 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042844 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042860 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042875 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042890 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042905 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042929 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9wxd8" event={"ID":"4e3aa70f-b0da-44c9-a850-96d4494b02fc","Type":"ContainerDied","Data":"3461e0c266fae127b8592fa27baa4fe782aa65ed4a243a7fd9b25a0e2f8a6755"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042956 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042975 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042990 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.043005 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.043023 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.043038 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.043052 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.043066 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.043080 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.043094 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.042530 4911 scope.go:117] "RemoveContainer" containerID="aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.043396 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lrfbg_1179c900-e866-4c5a-bb06-6032cc03a075/kube-multus/2.log"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.044066 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lrfbg_1179c900-e866-4c5a-bb06-6032cc03a075/kube-multus/1.log"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.044133 4911 generic.go:334] "Generic (PLEG): container finished" podID="1179c900-e866-4c5a-bb06-6032cc03a075" containerID="894308c4aead3d39450f470850392668331de7f234bd6595c49ea96fa39181d8" exitCode=2
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.044179 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lrfbg" event={"ID":"1179c900-e866-4c5a-bb06-6032cc03a075","Type":"ContainerDied","Data":"894308c4aead3d39450f470850392668331de7f234bd6595c49ea96fa39181d8"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.044211 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd"}
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.044916 4911 scope.go:117] "RemoveContainer" containerID="894308c4aead3d39450f470850392668331de7f234bd6595c49ea96fa39181d8"
Sep 29 21:34:29 crc kubenswrapper[4911]: E0929 21:34:29.045410 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-lrfbg_openshift-multus(1179c900-e866-4c5a-bb06-6032cc03a075)\"" pod="openshift-multus/multus-lrfbg" podUID="1179c900-e866-4c5a-bb06-6032cc03a075"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.064671 4911 scope.go:117] "RemoveContainer" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.097143 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9wxd8"]
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.101692 4911 scope.go:117] "RemoveContainer" containerID="68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.106495 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9wxd8"]
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.118536 4911 scope.go:117] "RemoveContainer" containerID="db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.130942 4911 scope.go:117] "RemoveContainer" containerID="e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.139985 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-var-lib-openvswitch\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140041 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-run-ovn-kubernetes\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140076 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-slash\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140082 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-var-lib-openvswitch\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140116 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-cni-netd\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140169 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-cni-netd\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140190 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-systemd-units\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140208 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-slash\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140223 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-ovnkube-config\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140298 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-run-ovn\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140343 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-node-log\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140404 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-etc-openvswitch\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140433 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-systemd-units\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140440 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-log-socket\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140498 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-node-log\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140519 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-etc-openvswitch\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140340 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-run-ovn-kubernetes\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140583 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-env-overrides\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140544 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-log-socket\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140422 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-run-ovn\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140697 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-kubelet\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140865 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-kubelet\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140897 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-run-systemd\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.140999 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-run-systemd\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq"
Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141007 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-ovnkube-config\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") "
pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141045 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-run-netns\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141111 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-run-netns\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141224 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141255 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141285 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-cni-bin\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141334 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-run-openvswitch\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141372 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-host-cni-bin\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141404 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-run-openvswitch\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141423 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd5wf\" (UniqueName: \"kubernetes.io/projected/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-kube-api-access-pd5wf\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141466 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-ovn-node-metrics-cert\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141515 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-ovnkube-script-lib\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.141684 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-env-overrides\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.143529 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-ovnkube-script-lib\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.146250 4911 scope.go:117] "RemoveContainer" containerID="00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.150032 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-ovn-node-metrics-cert\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.160436 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd5wf\" (UniqueName: \"kubernetes.io/projected/b4639d26-2c65-4332-a8fb-fd5b22a6e67c-kube-api-access-pd5wf\") pod \"ovnkube-node-zl5lq\" (UID: \"b4639d26-2c65-4332-a8fb-fd5b22a6e67c\") " pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.166002 4911 scope.go:117] "RemoveContainer" containerID="3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.173893 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.185510 4911 scope.go:117] "RemoveContainer" containerID="c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.206948 4911 scope.go:117] "RemoveContainer" containerID="c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.230094 4911 scope.go:117] "RemoveContainer" containerID="54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.252034 4911 scope.go:117] "RemoveContainer" containerID="aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d" Sep 29 21:34:29 crc kubenswrapper[4911]: E0929 21:34:29.252538 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d\": container with ID starting with aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d not found: ID does not exist" containerID="aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.252589 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d"} err="failed to get container status \"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d\": rpc error: code = NotFound desc = could not find container \"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d\": container with ID starting with aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.252619 4911 scope.go:117] "RemoveContainer" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f" Sep 29 21:34:29 crc kubenswrapper[4911]: E0929 21:34:29.252989 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\": container with ID starting with 9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f not found: ID does not exist" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.253013 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f"} err="failed to get container status \"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\": rpc error: code = NotFound desc = could not find container \"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\": container with ID starting with 9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.253027 4911 scope.go:117] "RemoveContainer" containerID="68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3" Sep 29 21:34:29 crc kubenswrapper[4911]: E0929 21:34:29.253600 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\": container with ID starting 
with 68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3 not found: ID does not exist" containerID="68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.253668 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3"} err="failed to get container status \"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\": rpc error: code = NotFound desc = could not find container \"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\": container with ID starting with 68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3 not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.253719 4911 scope.go:117] "RemoveContainer" containerID="db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87" Sep 29 21:34:29 crc kubenswrapper[4911]: E0929 21:34:29.254244 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\": container with ID starting with db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87 not found: ID does not exist" containerID="db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.254273 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87"} err="failed to get container status \"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\": rpc error: code = NotFound desc = could not find container \"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\": container with ID starting with db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87 not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.254320 4911 scope.go:117] "RemoveContainer" containerID="e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d" Sep 29 21:34:29 crc kubenswrapper[4911]: E0929 21:34:29.255016 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\": container with ID starting with e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d not found: ID does not exist" containerID="e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.255093 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d"} err="failed to get container status \"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\": rpc error: code = NotFound desc = could not find container \"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\": container with ID starting with e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.255151 4911 scope.go:117] "RemoveContainer" containerID="00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa" Sep 29 21:34:29 crc kubenswrapper[4911]: E0929 21:34:29.255638 4911 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\": container with ID starting with 00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa not found: ID does not exist" containerID="00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.255670 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa"} err="failed to get container status \"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\": rpc error: code = NotFound desc = could not find container \"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\": container with ID starting with 00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.255691 4911 scope.go:117] "RemoveContainer" containerID="3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b" Sep 29 21:34:29 crc kubenswrapper[4911]: E0929 21:34:29.256307 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\": container with ID starting with 3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b not found: ID does not exist" containerID="3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.256335 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b"} err="failed to get container status \"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\": rpc error: code = NotFound desc = could not find container \"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\": container with ID starting with 3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.256353 4911 scope.go:117] "RemoveContainer" containerID="c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0" Sep 29 21:34:29 crc kubenswrapper[4911]: E0929 21:34:29.256732 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\": container with ID starting with c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0 not found: ID does not exist" containerID="c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.256785 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0"} err="failed to get container status \"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\": rpc error: code = NotFound desc = could not find container \"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\": container with ID starting with c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0 not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.256894 4911 scope.go:117] "RemoveContainer" 
containerID="c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d" Sep 29 21:34:29 crc kubenswrapper[4911]: E0929 21:34:29.257343 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\": container with ID starting with c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d not found: ID does not exist" containerID="c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.257372 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d"} err="failed to get container status \"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\": rpc error: code = NotFound desc = could not find container \"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\": container with ID starting with c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.257390 4911 scope.go:117] "RemoveContainer" containerID="54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b" Sep 29 21:34:29 crc kubenswrapper[4911]: E0929 21:34:29.257782 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\": container with ID starting with 54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b not found: ID does not exist" containerID="54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.257902 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b"} err="failed to get container status \"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\": rpc error: code = NotFound desc = could not find container \"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\": container with ID starting with 54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.257961 4911 scope.go:117] "RemoveContainer" containerID="aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.258516 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d"} err="failed to get container status \"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d\": rpc error: code = NotFound desc = could not find container \"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d\": container with ID starting with aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.258544 4911 scope.go:117] "RemoveContainer" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.258951 4911 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f"} err="failed to get container status \"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\": rpc error: code = NotFound desc = could not find container \"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\": container with ID starting with 9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.259003 4911 scope.go:117] "RemoveContainer" containerID="68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.259323 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3"} err="failed to get container status \"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\": rpc error: code = NotFound desc = could not find container \"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\": container with ID starting with 68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3 not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.259355 4911 scope.go:117] "RemoveContainer" containerID="db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.259629 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87"} err="failed to get container status \"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\": rpc error: code = NotFound desc = could not find container \"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\": container with ID starting with db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87 not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.259656 4911 scope.go:117] "RemoveContainer" containerID="e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.260079 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d"} err="failed to get container status \"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\": rpc error: code = NotFound desc = could not find container \"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\": container with ID starting with e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.260101 4911 scope.go:117] "RemoveContainer" containerID="00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.260354 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa"} err="failed to get container status \"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\": rpc error: code = NotFound desc = could not find container \"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\": container with ID starting with 00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa not found: ID does not exist" Sep 
29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.260375 4911 scope.go:117] "RemoveContainer" containerID="3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.260628 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b"} err="failed to get container status \"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\": rpc error: code = NotFound desc = could not find container \"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\": container with ID starting with 3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.260650 4911 scope.go:117] "RemoveContainer" containerID="c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.263671 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0"} err="failed to get container status \"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\": rpc error: code = NotFound desc = could not find container \"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\": container with ID starting with c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0 not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.263693 4911 scope.go:117] "RemoveContainer" containerID="c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.264121 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d"} err="failed to get container status \"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\": rpc error: code = NotFound desc = could not find container \"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\": container with ID starting with c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.264141 4911 scope.go:117] "RemoveContainer" containerID="54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.264394 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b"} err="failed to get container status \"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\": rpc error: code = NotFound desc = could not find container \"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\": container with ID starting with 54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.264412 4911 scope.go:117] "RemoveContainer" containerID="aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.265016 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d"} err="failed to get container status 
\"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d\": rpc error: code = NotFound desc = could not find container \"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d\": container with ID starting with aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.265097 4911 scope.go:117] "RemoveContainer" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.265759 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f"} err="failed to get container status \"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\": rpc error: code = NotFound desc = could not find container \"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\": container with ID starting with 9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.265855 4911 scope.go:117] "RemoveContainer" containerID="68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.266898 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3"} err="failed to get container status \"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\": rpc error: code = NotFound desc = could not find container \"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\": container with ID starting with 68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3 not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.267066 4911 scope.go:117] "RemoveContainer" containerID="db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.268400 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87"} err="failed to get container status \"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\": rpc error: code = NotFound desc = could not find container \"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\": container with ID starting with db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87 not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.268435 4911 scope.go:117] "RemoveContainer" containerID="e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.268837 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d"} err="failed to get container status \"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\": rpc error: code = NotFound desc = could not find container \"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\": container with ID starting with e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.268894 4911 scope.go:117] "RemoveContainer" 
containerID="00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.269260 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa"} err="failed to get container status \"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\": rpc error: code = NotFound desc = could not find container \"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\": container with ID starting with 00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.269289 4911 scope.go:117] "RemoveContainer" containerID="3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.269637 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b"} err="failed to get container status \"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\": rpc error: code = NotFound desc = could not find container \"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\": container with ID starting with 3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.269660 4911 scope.go:117] "RemoveContainer" containerID="c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.270074 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0"} err="failed to get container status \"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\": rpc error: code = NotFound desc = could not find container \"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\": container with ID starting with c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0 not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.270098 4911 scope.go:117] "RemoveContainer" containerID="c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.270384 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d"} err="failed to get container status \"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\": rpc error: code = NotFound desc = could not find container \"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\": container with ID starting with c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.270427 4911 scope.go:117] "RemoveContainer" containerID="54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.270895 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b"} err="failed to get container status \"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\": rpc error: code = NotFound desc = could not find 
container \"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\": container with ID starting with 54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.270923 4911 scope.go:117] "RemoveContainer" containerID="aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.271302 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d"} err="failed to get container status \"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d\": rpc error: code = NotFound desc = could not find container \"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d\": container with ID starting with aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.271356 4911 scope.go:117] "RemoveContainer" containerID="9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.271687 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f"} err="failed to get container status \"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\": rpc error: code = NotFound desc = could not find container \"9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f\": container with ID starting with 9af670a0090bf77a03f27eaa0928873b2cbb836a1ac9bd6e176711e06eff996f not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.271717 4911 scope.go:117] "RemoveContainer" containerID="68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.272014 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3"} err="failed to get container status \"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\": rpc error: code = NotFound desc = could not find container \"68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3\": container with ID starting with 68ec54cc6e84d45b9550a2dcbd53b3ffc8cc97cb94da8d9961b29965af08b7e3 not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.272042 4911 scope.go:117] "RemoveContainer" containerID="db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.272380 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87"} err="failed to get container status \"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\": rpc error: code = NotFound desc = could not find container \"db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87\": container with ID starting with db1941e9959584d925b6ac546c234ce0dfaa2960a1277f282b51deb5ee56ec87 not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.272402 4911 scope.go:117] "RemoveContainer" containerID="e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.272768 4911 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d"} err="failed to get container status \"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\": rpc error: code = NotFound desc = could not find container \"e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d\": container with ID starting with e87c1e61cde26c6ae4496e13ebe8e46857c8656720399f3d89b9e9a75bb4681d not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.272846 4911 scope.go:117] "RemoveContainer" containerID="00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.273249 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa"} err="failed to get container status \"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\": rpc error: code = NotFound desc = could not find container \"00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa\": container with ID starting with 00681163bfe474dabf977e0c43ac71148c2a472efd4bc57f3d145ba1134a74aa not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.273275 4911 scope.go:117] "RemoveContainer" containerID="3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.273724 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b"} err="failed to get container status \"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\": rpc error: code = NotFound desc = could not find container \"3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b\": container with ID starting with 3568f71a4a1762e0d9be8ce4c317471f3f99a7c5b92b7daeb9fbace778f7e66b not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.273746 4911 scope.go:117] "RemoveContainer" containerID="c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.274087 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0"} err="failed to get container status \"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\": rpc error: code = NotFound desc = could not find container \"c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0\": container with ID starting with c06f6555e780f2f9f69cabcd3e26595acfde3e973387ed7b10345c673c17f5d0 not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.274143 4911 scope.go:117] "RemoveContainer" containerID="c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.274640 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d"} err="failed to get container status \"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\": rpc error: code = NotFound desc = could not find container \"c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d\": container with ID starting with 
c62aa953604f4ce35ca351ecf4e03da067af96f004c3ba3ea5f3a54b89def33d not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.274660 4911 scope.go:117] "RemoveContainer" containerID="54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.275038 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b"} err="failed to get container status \"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\": rpc error: code = NotFound desc = could not find container \"54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b\": container with ID starting with 54756c9fa4e428e033f0f77e7a8a52743cae0d7bc14dfee4d9702621dc48212b not found: ID does not exist" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.275093 4911 scope.go:117] "RemoveContainer" containerID="aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d" Sep 29 21:34:29 crc kubenswrapper[4911]: I0929 21:34:29.275451 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d"} err="failed to get container status \"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d\": rpc error: code = NotFound desc = could not find container \"aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d\": container with ID starting with aeace5fc3ce0a241b8c32a861db8955e1002329af2b76581fe4289bff5db9e3d not found: ID does not exist" Sep 29 21:34:30 crc kubenswrapper[4911]: I0929 21:34:30.052355 4911 generic.go:334] "Generic (PLEG): container finished" podID="b4639d26-2c65-4332-a8fb-fd5b22a6e67c" containerID="b258e68fbe479383a25c844ce7e9dd877d03b25c41e5bc1afb75752654f01353" exitCode=0 Sep 29 21:34:30 crc kubenswrapper[4911]: I0929 21:34:30.052403 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" event={"ID":"b4639d26-2c65-4332-a8fb-fd5b22a6e67c","Type":"ContainerDied","Data":"b258e68fbe479383a25c844ce7e9dd877d03b25c41e5bc1afb75752654f01353"} Sep 29 21:34:30 crc kubenswrapper[4911]: I0929 21:34:30.052438 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" event={"ID":"b4639d26-2c65-4332-a8fb-fd5b22a6e67c","Type":"ContainerStarted","Data":"3fd90d1a34ba07c238bbae0682009928efb5fb95cdd2a20a1121051a9f42fe77"} Sep 29 21:34:30 crc kubenswrapper[4911]: I0929 21:34:30.714518 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e3aa70f-b0da-44c9-a850-96d4494b02fc" path="/var/lib/kubelet/pods/4e3aa70f-b0da-44c9-a850-96d4494b02fc/volumes" Sep 29 21:34:31 crc kubenswrapper[4911]: I0929 21:34:31.062981 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" event={"ID":"b4639d26-2c65-4332-a8fb-fd5b22a6e67c","Type":"ContainerStarted","Data":"371d6ed46aee85de3effaab346e96aa1ccd4f38c99f54c9adc10d4370593f515"} Sep 29 21:34:31 crc kubenswrapper[4911]: I0929 21:34:31.063044 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" event={"ID":"b4639d26-2c65-4332-a8fb-fd5b22a6e67c","Type":"ContainerStarted","Data":"d00db0573369184b076516e297d7fd65d058c86efa3c7bbc3f19403bc449fad2"} Sep 29 21:34:31 crc kubenswrapper[4911]: I0929 21:34:31.063067 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" event={"ID":"b4639d26-2c65-4332-a8fb-fd5b22a6e67c","Type":"ContainerStarted","Data":"662104d4d6c3df3f26d851031770ce5075a8574a1939f899ca5f4082c23494a9"} Sep 29 21:34:31 crc kubenswrapper[4911]: I0929 21:34:31.063087 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" event={"ID":"b4639d26-2c65-4332-a8fb-fd5b22a6e67c","Type":"ContainerStarted","Data":"74d14d8e446c67e6bb1147d915f7f65611767ae91a4dc42d5994e83fe23c71ac"} Sep 29 21:34:31 crc kubenswrapper[4911]: I0929 21:34:31.063107 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" event={"ID":"b4639d26-2c65-4332-a8fb-fd5b22a6e67c","Type":"ContainerStarted","Data":"bb205ce3177b09a8093357802baa3befc0cde5a13a853e33b4d896ae328fe29a"} Sep 29 21:34:32 crc kubenswrapper[4911]: I0929 21:34:32.074265 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" event={"ID":"b4639d26-2c65-4332-a8fb-fd5b22a6e67c","Type":"ContainerStarted","Data":"27721449db6320277224e4eb5e098955d734d93aa92c8cc3616e48a34eb8c8c5"} Sep 29 21:34:34 crc kubenswrapper[4911]: I0929 21:34:34.096446 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" event={"ID":"b4639d26-2c65-4332-a8fb-fd5b22a6e67c","Type":"ContainerStarted","Data":"87660076f22b8bbff4f002a12bd119cf55551e3908f903c3ad111ad036ae9fb4"} Sep 29 21:34:36 crc kubenswrapper[4911]: I0929 21:34:36.113199 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" event={"ID":"b4639d26-2c65-4332-a8fb-fd5b22a6e67c","Type":"ContainerStarted","Data":"d7c176583bdb55ea2cc6d3d8583d2f457f04fe8de38fdcaad8dc76f7cf99127a"} Sep 29 21:34:36 crc kubenswrapper[4911]: I0929 21:34:36.113671 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:36 crc kubenswrapper[4911]: I0929 21:34:36.142299 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" podStartSLOduration=8.142283273 podStartE2EDuration="8.142283273s" podCreationTimestamp="2025-09-29 21:34:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:34:36.141034453 +0000 UTC m=+554.118147184" watchObservedRunningTime="2025-09-29 21:34:36.142283273 +0000 UTC m=+554.119395964" Sep 29 21:34:36 crc kubenswrapper[4911]: I0929 21:34:36.159478 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:37 crc kubenswrapper[4911]: I0929 21:34:37.121935 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:37 crc kubenswrapper[4911]: I0929 21:34:37.122019 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:37 crc kubenswrapper[4911]: I0929 21:34:37.172940 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:34:43 crc kubenswrapper[4911]: I0929 21:34:43.701427 4911 scope.go:117] "RemoveContainer" containerID="894308c4aead3d39450f470850392668331de7f234bd6595c49ea96fa39181d8" Sep 29 21:34:43 crc kubenswrapper[4911]: E0929 21:34:43.702189 4911 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-lrfbg_openshift-multus(1179c900-e866-4c5a-bb06-6032cc03a075)\"" pod="openshift-multus/multus-lrfbg" podUID="1179c900-e866-4c5a-bb06-6032cc03a075" Sep 29 21:34:52 crc kubenswrapper[4911]: I0929 21:34:52.769452 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx"] Sep 29 21:34:52 crc kubenswrapper[4911]: I0929 21:34:52.771947 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:52 crc kubenswrapper[4911]: I0929 21:34:52.777101 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 21:34:52 crc kubenswrapper[4911]: I0929 21:34:52.788992 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx"] Sep 29 21:34:52 crc kubenswrapper[4911]: I0929 21:34:52.893873 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e417e0ba-3b3f-4700-8921-345cc400b7ba-util\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx\" (UID: \"e417e0ba-3b3f-4700-8921-345cc400b7ba\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:52 crc kubenswrapper[4911]: I0929 21:34:52.893974 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e417e0ba-3b3f-4700-8921-345cc400b7ba-bundle\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx\" (UID: \"e417e0ba-3b3f-4700-8921-345cc400b7ba\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:52 crc kubenswrapper[4911]: I0929 21:34:52.894018 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsj7x\" (UniqueName: \"kubernetes.io/projected/e417e0ba-3b3f-4700-8921-345cc400b7ba-kube-api-access-xsj7x\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx\" (UID: \"e417e0ba-3b3f-4700-8921-345cc400b7ba\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:52 crc kubenswrapper[4911]: I0929 21:34:52.995726 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e417e0ba-3b3f-4700-8921-345cc400b7ba-util\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx\" (UID: \"e417e0ba-3b3f-4700-8921-345cc400b7ba\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:52 crc kubenswrapper[4911]: I0929 21:34:52.995932 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e417e0ba-3b3f-4700-8921-345cc400b7ba-bundle\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx\" (UID: \"e417e0ba-3b3f-4700-8921-345cc400b7ba\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:52 crc 
kubenswrapper[4911]: I0929 21:34:52.995995 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsj7x\" (UniqueName: \"kubernetes.io/projected/e417e0ba-3b3f-4700-8921-345cc400b7ba-kube-api-access-xsj7x\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx\" (UID: \"e417e0ba-3b3f-4700-8921-345cc400b7ba\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:52 crc kubenswrapper[4911]: I0929 21:34:52.996669 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e417e0ba-3b3f-4700-8921-345cc400b7ba-util\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx\" (UID: \"e417e0ba-3b3f-4700-8921-345cc400b7ba\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:52 crc kubenswrapper[4911]: I0929 21:34:52.996734 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e417e0ba-3b3f-4700-8921-345cc400b7ba-bundle\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx\" (UID: \"e417e0ba-3b3f-4700-8921-345cc400b7ba\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.039027 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsj7x\" (UniqueName: \"kubernetes.io/projected/e417e0ba-3b3f-4700-8921-345cc400b7ba-kube-api-access-xsj7x\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx\" (UID: \"e417e0ba-3b3f-4700-8921-345cc400b7ba\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.109777 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.162634 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc"] Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.165294 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:53 crc kubenswrapper[4911]: E0929 21:34:53.174289 4911 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_openshift-marketplace_e417e0ba-3b3f-4700-8921-345cc400b7ba_0(0ded71de0bbc53f5def3ff7d53354328d7f86649ee6f6afc1fbcd27b43397ff9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 21:34:53 crc kubenswrapper[4911]: E0929 21:34:53.174351 4911 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_openshift-marketplace_e417e0ba-3b3f-4700-8921-345cc400b7ba_0(0ded71de0bbc53f5def3ff7d53354328d7f86649ee6f6afc1fbcd27b43397ff9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:53 crc kubenswrapper[4911]: E0929 21:34:53.174377 4911 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_openshift-marketplace_e417e0ba-3b3f-4700-8921-345cc400b7ba_0(0ded71de0bbc53f5def3ff7d53354328d7f86649ee6f6afc1fbcd27b43397ff9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:53 crc kubenswrapper[4911]: E0929 21:34:53.174432 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_openshift-marketplace(e417e0ba-3b3f-4700-8921-345cc400b7ba)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_openshift-marketplace(e417e0ba-3b3f-4700-8921-345cc400b7ba)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_openshift-marketplace_e417e0ba-3b3f-4700-8921-345cc400b7ba_0(0ded71de0bbc53f5def3ff7d53354328d7f86649ee6f6afc1fbcd27b43397ff9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" podUID="e417e0ba-3b3f-4700-8921-345cc400b7ba" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.176269 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc"] Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.234832 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.235722 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:53 crc kubenswrapper[4911]: E0929 21:34:53.261893 4911 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_openshift-marketplace_e417e0ba-3b3f-4700-8921-345cc400b7ba_0(ca0b17110627ea1951e67dbc993ad18c844e5128666f03b2ea30374240c82f0f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 21:34:53 crc kubenswrapper[4911]: E0929 21:34:53.261964 4911 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_openshift-marketplace_e417e0ba-3b3f-4700-8921-345cc400b7ba_0(ca0b17110627ea1951e67dbc993ad18c844e5128666f03b2ea30374240c82f0f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:53 crc kubenswrapper[4911]: E0929 21:34:53.262002 4911 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_openshift-marketplace_e417e0ba-3b3f-4700-8921-345cc400b7ba_0(ca0b17110627ea1951e67dbc993ad18c844e5128666f03b2ea30374240c82f0f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:34:53 crc kubenswrapper[4911]: E0929 21:34:53.262068 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_openshift-marketplace(e417e0ba-3b3f-4700-8921-345cc400b7ba)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_openshift-marketplace(e417e0ba-3b3f-4700-8921-345cc400b7ba)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_openshift-marketplace_e417e0ba-3b3f-4700-8921-345cc400b7ba_0(ca0b17110627ea1951e67dbc993ad18c844e5128666f03b2ea30374240c82f0f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" podUID="e417e0ba-3b3f-4700-8921-345cc400b7ba" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.303653 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98ba0cf0-64a8-482d-af55-862d182fefdb-util\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc\" (UID: \"98ba0cf0-64a8-482d-af55-862d182fefdb\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.303924 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x44f\" (UniqueName: \"kubernetes.io/projected/98ba0cf0-64a8-482d-af55-862d182fefdb-kube-api-access-6x44f\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc\" (UID: \"98ba0cf0-64a8-482d-af55-862d182fefdb\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.304111 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98ba0cf0-64a8-482d-af55-862d182fefdb-bundle\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc\" (UID: \"98ba0cf0-64a8-482d-af55-862d182fefdb\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.406130 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98ba0cf0-64a8-482d-af55-862d182fefdb-util\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc\" (UID: \"98ba0cf0-64a8-482d-af55-862d182fefdb\") " 
pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.406298 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x44f\" (UniqueName: \"kubernetes.io/projected/98ba0cf0-64a8-482d-af55-862d182fefdb-kube-api-access-6x44f\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc\" (UID: \"98ba0cf0-64a8-482d-af55-862d182fefdb\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.406424 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98ba0cf0-64a8-482d-af55-862d182fefdb-bundle\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc\" (UID: \"98ba0cf0-64a8-482d-af55-862d182fefdb\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.407624 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98ba0cf0-64a8-482d-af55-862d182fefdb-util\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc\" (UID: \"98ba0cf0-64a8-482d-af55-862d182fefdb\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.408240 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98ba0cf0-64a8-482d-af55-862d182fefdb-bundle\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc\" (UID: \"98ba0cf0-64a8-482d-af55-862d182fefdb\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.437331 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x44f\" (UniqueName: \"kubernetes.io/projected/98ba0cf0-64a8-482d-af55-862d182fefdb-kube-api-access-6x44f\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc\" (UID: \"98ba0cf0-64a8-482d-af55-862d182fefdb\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:53 crc kubenswrapper[4911]: I0929 21:34:53.513855 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:53 crc kubenswrapper[4911]: E0929 21:34:53.551744 4911 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_openshift-marketplace_98ba0cf0-64a8-482d-af55-862d182fefdb_0(b7cd8f1fc17196f6cf0112e3994c6119b8cb17b72d3e8751dc4acd0db69e8f2d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Sep 29 21:34:53 crc kubenswrapper[4911]: E0929 21:34:53.551896 4911 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_openshift-marketplace_98ba0cf0-64a8-482d-af55-862d182fefdb_0(b7cd8f1fc17196f6cf0112e3994c6119b8cb17b72d3e8751dc4acd0db69e8f2d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc"
Sep 29 21:34:53 crc kubenswrapper[4911]: E0929 21:34:53.551935 4911 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_openshift-marketplace_98ba0cf0-64a8-482d-af55-862d182fefdb_0(b7cd8f1fc17196f6cf0112e3994c6119b8cb17b72d3e8751dc4acd0db69e8f2d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc"
Sep 29 21:34:53 crc kubenswrapper[4911]: E0929 21:34:53.552005 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_openshift-marketplace(98ba0cf0-64a8-482d-af55-862d182fefdb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_openshift-marketplace(98ba0cf0-64a8-482d-af55-862d182fefdb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_openshift-marketplace_98ba0cf0-64a8-482d-af55-862d182fefdb_0(b7cd8f1fc17196f6cf0112e3994c6119b8cb17b72d3e8751dc4acd0db69e8f2d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" podUID="98ba0cf0-64a8-482d-af55-862d182fefdb"
Sep 29 21:34:54 crc kubenswrapper[4911]: I0929 21:34:54.242117 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc"
Sep 29 21:34:54 crc kubenswrapper[4911]: I0929 21:34:54.243320 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc"
Sep 29 21:34:54 crc kubenswrapper[4911]: E0929 21:34:54.285322 4911 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_openshift-marketplace_98ba0cf0-64a8-482d-af55-862d182fefdb_0(dbd1f3a96bfd01c7187c88bc947cf647e78cd17a530afac5e403ffddd1ec2c16): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
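
Every "RunPodSandbox from runtime service failed" cascade in this stretch has the same root-cause string: no CNI configuration file in /etc/kubernetes/cni/net.d/. On this cluster that file comes from multus, which is still crash-looping at this point, so the runtime has no network to attach sandboxes to; the errors stop once multus restarts at 21:34:56 and the marketplace sandboxes come up at 21:35:05. The check the runtime is effectively making can be approximated with a short diagnostic; a sketch only, under the assumption that the runtime accepts .conf, .conflist, and .json files in that directory:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one CNI network
// definition file. Sketch of the condition behind the "no CNI configuration
// file" error above; the real loader also parses and validates the files.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if !ok {
		fmt.Println("no CNI configuration file found; has your network provider started?")
	}
}
```
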
Sep 29 21:34:54 crc kubenswrapper[4911]: E0929 21:34:54.285476 4911 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_openshift-marketplace_98ba0cf0-64a8-482d-af55-862d182fefdb_0(dbd1f3a96bfd01c7187c88bc947cf647e78cd17a530afac5e403ffddd1ec2c16): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:54 crc kubenswrapper[4911]: E0929 21:34:54.285537 4911 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_openshift-marketplace_98ba0cf0-64a8-482d-af55-862d182fefdb_0(dbd1f3a96bfd01c7187c88bc947cf647e78cd17a530afac5e403ffddd1ec2c16): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:34:54 crc kubenswrapper[4911]: E0929 21:34:54.285663 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_openshift-marketplace(98ba0cf0-64a8-482d-af55-862d182fefdb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_openshift-marketplace(98ba0cf0-64a8-482d-af55-862d182fefdb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_openshift-marketplace_98ba0cf0-64a8-482d-af55-862d182fefdb_0(dbd1f3a96bfd01c7187c88bc947cf647e78cd17a530afac5e403ffddd1ec2c16): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" podUID="98ba0cf0-64a8-482d-af55-862d182fefdb" Sep 29 21:34:55 crc kubenswrapper[4911]: I0929 21:34:55.210953 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:34:55 crc kubenswrapper[4911]: I0929 21:34:55.211079 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:34:55 crc kubenswrapper[4911]: I0929 21:34:55.211172 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:34:55 crc kubenswrapper[4911]: I0929 21:34:55.212189 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"169a8ea1d84bd44d55b71bca47978a16d348f46e726c92490e98ec486b65a803"} pod="openshift-machine-config-operator/machine-config-daemon-w647f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 21:34:55 crc kubenswrapper[4911]: I0929 21:34:55.212307 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" containerID="cri-o://169a8ea1d84bd44d55b71bca47978a16d348f46e726c92490e98ec486b65a803" gracePeriod=600 Sep 29 21:34:55 crc kubenswrapper[4911]: I0929 21:34:55.701731 4911 scope.go:117] "RemoveContainer" containerID="894308c4aead3d39450f470850392668331de7f234bd6595c49ea96fa39181d8" Sep 29 21:34:56 crc kubenswrapper[4911]: I0929 21:34:56.256081 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lrfbg_1179c900-e866-4c5a-bb06-6032cc03a075/kube-multus/2.log" Sep 29 21:34:56 crc kubenswrapper[4911]: I0929 21:34:56.257087 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lrfbg_1179c900-e866-4c5a-bb06-6032cc03a075/kube-multus/1.log" Sep 29 21:34:56 crc kubenswrapper[4911]: I0929 21:34:56.257225 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lrfbg" event={"ID":"1179c900-e866-4c5a-bb06-6032cc03a075","Type":"ContainerStarted","Data":"8a3ad9e47a5193507002ab292e2fee63162373c07591f92969cad51497087591"} Sep 29 21:34:56 crc kubenswrapper[4911]: I0929 21:34:56.259883 4911 generic.go:334] "Generic (PLEG): container finished" podID="50640abc-40db-4390-82d1-f3cfc76da71c" containerID="169a8ea1d84bd44d55b71bca47978a16d348f46e726c92490e98ec486b65a803" exitCode=0 Sep 29 21:34:56 crc kubenswrapper[4911]: I0929 21:34:56.259925 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerDied","Data":"169a8ea1d84bd44d55b71bca47978a16d348f46e726c92490e98ec486b65a803"} Sep 29 21:34:56 crc kubenswrapper[4911]: I0929 21:34:56.259984 4911 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"3f4ab040675bb23b2a12316fff86293a5b72278bad6949dfbe357c01f7df89f3"} Sep 29 21:34:56 crc kubenswrapper[4911]: I0929 21:34:56.260006 4911 scope.go:117] "RemoveContainer" containerID="e38c1fe31b443189675002028c28967c8680009a80ff20447fa8074033d6557e" Sep 29 21:34:59 crc kubenswrapper[4911]: I0929 21:34:59.217231 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zl5lq" Sep 29 21:35:04 crc kubenswrapper[4911]: I0929 21:35:04.701434 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:35:04 crc kubenswrapper[4911]: I0929 21:35:04.702931 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:35:05 crc kubenswrapper[4911]: I0929 21:35:05.177633 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx"] Sep 29 21:35:05 crc kubenswrapper[4911]: I0929 21:35:05.326033 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" event={"ID":"e417e0ba-3b3f-4700-8921-345cc400b7ba","Type":"ContainerStarted","Data":"09fbbefae2c1888a809986e906b4f5962ebf1cde6ef1046fe00a431a18ef29c8"} Sep 29 21:35:06 crc kubenswrapper[4911]: I0929 21:35:06.337617 4911 generic.go:334] "Generic (PLEG): container finished" podID="e417e0ba-3b3f-4700-8921-345cc400b7ba" containerID="1900e5b1a551924ba31e8ec7e655c15c28911934e506504941ec4aea0fab65c0" exitCode=0 Sep 29 21:35:06 crc kubenswrapper[4911]: I0929 21:35:06.337695 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" event={"ID":"e417e0ba-3b3f-4700-8921-345cc400b7ba","Type":"ContainerDied","Data":"1900e5b1a551924ba31e8ec7e655c15c28911934e506504941ec4aea0fab65c0"} Sep 29 21:35:08 crc kubenswrapper[4911]: I0929 21:35:08.354711 4911 generic.go:334] "Generic (PLEG): container finished" podID="e417e0ba-3b3f-4700-8921-345cc400b7ba" containerID="e5a4b009c68b07429d5c1e1cc7cf3dd7fcb5141807a3a3f4f7fbb894ee4d36d6" exitCode=0 Sep 29 21:35:08 crc kubenswrapper[4911]: I0929 21:35:08.354776 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" event={"ID":"e417e0ba-3b3f-4700-8921-345cc400b7ba","Type":"ContainerDied","Data":"e5a4b009c68b07429d5c1e1cc7cf3dd7fcb5141807a3a3f4f7fbb894ee4d36d6"} Sep 29 21:35:08 crc kubenswrapper[4911]: I0929 21:35:08.700286 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:35:08 crc kubenswrapper[4911]: I0929 21:35:08.701172 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:35:08 crc kubenswrapper[4911]: I0929 21:35:08.960462 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc"] Sep 29 21:35:08 crc kubenswrapper[4911]: W0929 21:35:08.967969 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98ba0cf0_64a8_482d_af55_862d182fefdb.slice/crio-d65a3ef0c1617002237c6da5bebf4ea4e56b4a4dc40cd48db97d0228e43190d9 WatchSource:0}: Error finding container d65a3ef0c1617002237c6da5bebf4ea4e56b4a4dc40cd48db97d0228e43190d9: Status 404 returned error can't find the container with id d65a3ef0c1617002237c6da5bebf4ea4e56b4a4dc40cd48db97d0228e43190d9 Sep 29 21:35:09 crc kubenswrapper[4911]: I0929 21:35:09.363972 4911 generic.go:334] "Generic (PLEG): container finished" podID="98ba0cf0-64a8-482d-af55-862d182fefdb" containerID="630a22cb66bd6f12a6a2630e2570a2d5c240185f16c327d9976629ae05faa751" exitCode=0 Sep 29 21:35:09 crc kubenswrapper[4911]: I0929 21:35:09.364070 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" event={"ID":"98ba0cf0-64a8-482d-af55-862d182fefdb","Type":"ContainerDied","Data":"630a22cb66bd6f12a6a2630e2570a2d5c240185f16c327d9976629ae05faa751"} Sep 29 21:35:09 crc kubenswrapper[4911]: I0929 21:35:09.364101 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" event={"ID":"98ba0cf0-64a8-482d-af55-862d182fefdb","Type":"ContainerStarted","Data":"d65a3ef0c1617002237c6da5bebf4ea4e56b4a4dc40cd48db97d0228e43190d9"} Sep 29 21:35:09 crc kubenswrapper[4911]: I0929 21:35:09.368739 4911 generic.go:334] "Generic (PLEG): container finished" podID="e417e0ba-3b3f-4700-8921-345cc400b7ba" containerID="1e10e79f32a6a4e110583c88dc01638c075d5f51bf36365024024e23d4c27ad4" exitCode=0 Sep 29 21:35:09 crc kubenswrapper[4911]: I0929 21:35:09.368774 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" event={"ID":"e417e0ba-3b3f-4700-8921-345cc400b7ba","Type":"ContainerDied","Data":"1e10e79f32a6a4e110583c88dc01638c075d5f51bf36365024024e23d4c27ad4"} Sep 29 21:35:10 crc kubenswrapper[4911]: I0929 21:35:10.694835 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:35:10 crc kubenswrapper[4911]: I0929 21:35:10.875578 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e417e0ba-3b3f-4700-8921-345cc400b7ba-util\") pod \"e417e0ba-3b3f-4700-8921-345cc400b7ba\" (UID: \"e417e0ba-3b3f-4700-8921-345cc400b7ba\") " Sep 29 21:35:10 crc kubenswrapper[4911]: I0929 21:35:10.875710 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e417e0ba-3b3f-4700-8921-345cc400b7ba-bundle\") pod \"e417e0ba-3b3f-4700-8921-345cc400b7ba\" (UID: \"e417e0ba-3b3f-4700-8921-345cc400b7ba\") " Sep 29 21:35:10 crc kubenswrapper[4911]: I0929 21:35:10.875805 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsj7x\" (UniqueName: \"kubernetes.io/projected/e417e0ba-3b3f-4700-8921-345cc400b7ba-kube-api-access-xsj7x\") pod \"e417e0ba-3b3f-4700-8921-345cc400b7ba\" (UID: \"e417e0ba-3b3f-4700-8921-345cc400b7ba\") " Sep 29 21:35:10 crc kubenswrapper[4911]: I0929 21:35:10.876820 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e417e0ba-3b3f-4700-8921-345cc400b7ba-bundle" (OuterVolumeSpecName: "bundle") pod "e417e0ba-3b3f-4700-8921-345cc400b7ba" (UID: "e417e0ba-3b3f-4700-8921-345cc400b7ba"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:35:10 crc kubenswrapper[4911]: I0929 21:35:10.897131 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e417e0ba-3b3f-4700-8921-345cc400b7ba-kube-api-access-xsj7x" (OuterVolumeSpecName: "kube-api-access-xsj7x") pod "e417e0ba-3b3f-4700-8921-345cc400b7ba" (UID: "e417e0ba-3b3f-4700-8921-345cc400b7ba"). InnerVolumeSpecName "kube-api-access-xsj7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:35:10 crc kubenswrapper[4911]: I0929 21:35:10.898733 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e417e0ba-3b3f-4700-8921-345cc400b7ba-util" (OuterVolumeSpecName: "util") pod "e417e0ba-3b3f-4700-8921-345cc400b7ba" (UID: "e417e0ba-3b3f-4700-8921-345cc400b7ba"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:35:10 crc kubenswrapper[4911]: I0929 21:35:10.977293 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsj7x\" (UniqueName: \"kubernetes.io/projected/e417e0ba-3b3f-4700-8921-345cc400b7ba-kube-api-access-xsj7x\") on node \"crc\" DevicePath \"\"" Sep 29 21:35:10 crc kubenswrapper[4911]: I0929 21:35:10.977373 4911 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e417e0ba-3b3f-4700-8921-345cc400b7ba-util\") on node \"crc\" DevicePath \"\"" Sep 29 21:35:10 crc kubenswrapper[4911]: I0929 21:35:10.977390 4911 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e417e0ba-3b3f-4700-8921-345cc400b7ba-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:35:11 crc kubenswrapper[4911]: I0929 21:35:11.387918 4911 generic.go:334] "Generic (PLEG): container finished" podID="98ba0cf0-64a8-482d-af55-862d182fefdb" containerID="d77f5eb5d14c3fea11e1e36e4385c772d961a8f4d96a0b353bb72fcf0d0d1fd9" exitCode=0 Sep 29 21:35:11 crc kubenswrapper[4911]: I0929 21:35:11.388061 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" event={"ID":"98ba0cf0-64a8-482d-af55-862d182fefdb","Type":"ContainerDied","Data":"d77f5eb5d14c3fea11e1e36e4385c772d961a8f4d96a0b353bb72fcf0d0d1fd9"} Sep 29 21:35:11 crc kubenswrapper[4911]: I0929 21:35:11.393026 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" event={"ID":"e417e0ba-3b3f-4700-8921-345cc400b7ba","Type":"ContainerDied","Data":"09fbbefae2c1888a809986e906b4f5962ebf1cde6ef1046fe00a431a18ef29c8"} Sep 29 21:35:11 crc kubenswrapper[4911]: I0929 21:35:11.393067 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09fbbefae2c1888a809986e906b4f5962ebf1cde6ef1046fe00a431a18ef29c8" Sep 29 21:35:11 crc kubenswrapper[4911]: I0929 21:35:11.393137 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx" Sep 29 21:35:12 crc kubenswrapper[4911]: I0929 21:35:12.414740 4911 generic.go:334] "Generic (PLEG): container finished" podID="98ba0cf0-64a8-482d-af55-862d182fefdb" containerID="f80e3623a42e8a1f847c423d941cd4fe91f8c7bf81c653180914b6f536be4403" exitCode=0 Sep 29 21:35:12 crc kubenswrapper[4911]: I0929 21:35:12.415875 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" event={"ID":"98ba0cf0-64a8-482d-af55-862d182fefdb","Type":"ContainerDied","Data":"f80e3623a42e8a1f847c423d941cd4fe91f8c7bf81c653180914b6f536be4403"} Sep 29 21:35:13 crc kubenswrapper[4911]: I0929 21:35:13.748765 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:35:13 crc kubenswrapper[4911]: I0929 21:35:13.919609 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6x44f\" (UniqueName: \"kubernetes.io/projected/98ba0cf0-64a8-482d-af55-862d182fefdb-kube-api-access-6x44f\") pod \"98ba0cf0-64a8-482d-af55-862d182fefdb\" (UID: \"98ba0cf0-64a8-482d-af55-862d182fefdb\") " Sep 29 21:35:13 crc kubenswrapper[4911]: I0929 21:35:13.919842 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98ba0cf0-64a8-482d-af55-862d182fefdb-bundle\") pod \"98ba0cf0-64a8-482d-af55-862d182fefdb\" (UID: \"98ba0cf0-64a8-482d-af55-862d182fefdb\") " Sep 29 21:35:13 crc kubenswrapper[4911]: I0929 21:35:13.919965 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98ba0cf0-64a8-482d-af55-862d182fefdb-util\") pod \"98ba0cf0-64a8-482d-af55-862d182fefdb\" (UID: \"98ba0cf0-64a8-482d-af55-862d182fefdb\") " Sep 29 21:35:13 crc kubenswrapper[4911]: I0929 21:35:13.920909 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98ba0cf0-64a8-482d-af55-862d182fefdb-bundle" (OuterVolumeSpecName: "bundle") pod "98ba0cf0-64a8-482d-af55-862d182fefdb" (UID: "98ba0cf0-64a8-482d-af55-862d182fefdb"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:35:13 crc kubenswrapper[4911]: I0929 21:35:13.927963 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98ba0cf0-64a8-482d-af55-862d182fefdb-kube-api-access-6x44f" (OuterVolumeSpecName: "kube-api-access-6x44f") pod "98ba0cf0-64a8-482d-af55-862d182fefdb" (UID: "98ba0cf0-64a8-482d-af55-862d182fefdb"). InnerVolumeSpecName "kube-api-access-6x44f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:35:13 crc kubenswrapper[4911]: I0929 21:35:13.936079 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98ba0cf0-64a8-482d-af55-862d182fefdb-util" (OuterVolumeSpecName: "util") pod "98ba0cf0-64a8-482d-af55-862d182fefdb" (UID: "98ba0cf0-64a8-482d-af55-862d182fefdb"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:35:14 crc kubenswrapper[4911]: I0929 21:35:14.021119 4911 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98ba0cf0-64a8-482d-af55-862d182fefdb-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:35:14 crc kubenswrapper[4911]: I0929 21:35:14.021162 4911 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98ba0cf0-64a8-482d-af55-862d182fefdb-util\") on node \"crc\" DevicePath \"\"" Sep 29 21:35:14 crc kubenswrapper[4911]: I0929 21:35:14.021172 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6x44f\" (UniqueName: \"kubernetes.io/projected/98ba0cf0-64a8-482d-af55-862d182fefdb-kube-api-access-6x44f\") on node \"crc\" DevicePath \"\"" Sep 29 21:35:14 crc kubenswrapper[4911]: I0929 21:35:14.433383 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" event={"ID":"98ba0cf0-64a8-482d-af55-862d182fefdb","Type":"ContainerDied","Data":"d65a3ef0c1617002237c6da5bebf4ea4e56b4a4dc40cd48db97d0228e43190d9"} Sep 29 21:35:14 crc kubenswrapper[4911]: I0929 21:35:14.433435 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d65a3ef0c1617002237c6da5bebf4ea4e56b4a4dc40cd48db97d0228e43190d9" Sep 29 21:35:14 crc kubenswrapper[4911]: I0929 21:35:14.433480 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.249770 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/cluster-logging-operator-fcc886d58-nw5qw"] Sep 29 21:35:16 crc kubenswrapper[4911]: E0929 21:35:16.251405 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98ba0cf0-64a8-482d-af55-862d182fefdb" containerName="util" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.251473 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="98ba0cf0-64a8-482d-af55-862d182fefdb" containerName="util" Sep 29 21:35:16 crc kubenswrapper[4911]: E0929 21:35:16.251547 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e417e0ba-3b3f-4700-8921-345cc400b7ba" containerName="pull" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.251608 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e417e0ba-3b3f-4700-8921-345cc400b7ba" containerName="pull" Sep 29 21:35:16 crc kubenswrapper[4911]: E0929 21:35:16.251670 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e417e0ba-3b3f-4700-8921-345cc400b7ba" containerName="util" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.251722 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e417e0ba-3b3f-4700-8921-345cc400b7ba" containerName="util" Sep 29 21:35:16 crc kubenswrapper[4911]: E0929 21:35:16.251777 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98ba0cf0-64a8-482d-af55-862d182fefdb" containerName="extract" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.251845 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="98ba0cf0-64a8-482d-af55-862d182fefdb" containerName="extract" Sep 29 21:35:16 crc kubenswrapper[4911]: E0929 21:35:16.251904 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98ba0cf0-64a8-482d-af55-862d182fefdb" containerName="pull" Sep 29 21:35:16 crc 
kubenswrapper[4911]: I0929 21:35:16.251957 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="98ba0cf0-64a8-482d-af55-862d182fefdb" containerName="pull" Sep 29 21:35:16 crc kubenswrapper[4911]: E0929 21:35:16.252011 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e417e0ba-3b3f-4700-8921-345cc400b7ba" containerName="extract" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.252065 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e417e0ba-3b3f-4700-8921-345cc400b7ba" containerName="extract" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.252291 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="98ba0cf0-64a8-482d-af55-862d182fefdb" containerName="extract" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.252388 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e417e0ba-3b3f-4700-8921-345cc400b7ba" containerName="extract" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.253047 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/cluster-logging-operator-fcc886d58-nw5qw" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.256919 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"openshift-service-ca.crt" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.259031 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"cluster-logging-operator-dockercfg-m69b2" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.259725 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"kube-root-ca.crt" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.271889 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-fcc886d58-nw5qw"] Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.355473 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8dkr\" (UniqueName: \"kubernetes.io/projected/ba2c473e-d26f-435d-b673-24026f131de8-kube-api-access-l8dkr\") pod \"cluster-logging-operator-fcc886d58-nw5qw\" (UID: \"ba2c473e-d26f-435d-b673-24026f131de8\") " pod="openshift-logging/cluster-logging-operator-fcc886d58-nw5qw" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.457408 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8dkr\" (UniqueName: \"kubernetes.io/projected/ba2c473e-d26f-435d-b673-24026f131de8-kube-api-access-l8dkr\") pod \"cluster-logging-operator-fcc886d58-nw5qw\" (UID: \"ba2c473e-d26f-435d-b673-24026f131de8\") " pod="openshift-logging/cluster-logging-operator-fcc886d58-nw5qw" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.483280 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8dkr\" (UniqueName: \"kubernetes.io/projected/ba2c473e-d26f-435d-b673-24026f131de8-kube-api-access-l8dkr\") pod \"cluster-logging-operator-fcc886d58-nw5qw\" (UID: \"ba2c473e-d26f-435d-b673-24026f131de8\") " pod="openshift-logging/cluster-logging-operator-fcc886d58-nw5qw" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.568512 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/cluster-logging-operator-fcc886d58-nw5qw" Sep 29 21:35:16 crc kubenswrapper[4911]: I0929 21:35:16.800379 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-fcc886d58-nw5qw"] Sep 29 21:35:16 crc kubenswrapper[4911]: W0929 21:35:16.825050 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba2c473e_d26f_435d_b673_24026f131de8.slice/crio-57efa323eaa0d774bcfeef8fa5db6e8f4cad437ddfd407a773202f52dcc92742 WatchSource:0}: Error finding container 57efa323eaa0d774bcfeef8fa5db6e8f4cad437ddfd407a773202f52dcc92742: Status 404 returned error can't find the container with id 57efa323eaa0d774bcfeef8fa5db6e8f4cad437ddfd407a773202f52dcc92742 Sep 29 21:35:17 crc kubenswrapper[4911]: I0929 21:35:17.455986 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-fcc886d58-nw5qw" event={"ID":"ba2c473e-d26f-435d-b673-24026f131de8","Type":"ContainerStarted","Data":"57efa323eaa0d774bcfeef8fa5db6e8f4cad437ddfd407a773202f52dcc92742"} Sep 29 21:35:22 crc kubenswrapper[4911]: I0929 21:35:22.509057 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-fcc886d58-nw5qw" event={"ID":"ba2c473e-d26f-435d-b673-24026f131de8","Type":"ContainerStarted","Data":"b3059d67a931b9134c9907d61bcf25a328fca496556c85485be01dc081fd44f6"} Sep 29 21:35:22 crc kubenswrapper[4911]: I0929 21:35:22.533335 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/cluster-logging-operator-fcc886d58-nw5qw" podStartSLOduration=1.541033732 podStartE2EDuration="6.533309786s" podCreationTimestamp="2025-09-29 21:35:16 +0000 UTC" firstStartedPulling="2025-09-29 21:35:16.828904366 +0000 UTC m=+594.806017047" lastFinishedPulling="2025-09-29 21:35:21.82118039 +0000 UTC m=+599.798293101" observedRunningTime="2025-09-29 21:35:22.530631601 +0000 UTC m=+600.507744292" watchObservedRunningTime="2025-09-29 21:35:22.533309786 +0000 UTC m=+600.510422487" Sep 29 21:35:22 crc kubenswrapper[4911]: I0929 21:35:22.973985 4911 scope.go:117] "RemoveContainer" containerID="bc7df21f84f39b5c19f6039c6a102c478f158684a5952a968322654eecec14fd" Sep 29 21:35:23 crc kubenswrapper[4911]: I0929 21:35:23.519990 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lrfbg_1179c900-e866-4c5a-bb06-6032cc03a075/kube-multus/2.log" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.678446 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp"] Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.679947 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.681628 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.681825 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.683433 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.683668 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.683805 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.684078 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-mh8ql" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.700509 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp"] Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.821818 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3ded07bd-2737-4d5d-8265-6c2e38f653d8-apiservice-cert\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.821882 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3ded07bd-2737-4d5d-8265-6c2e38f653d8-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.821922 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/3ded07bd-2737-4d5d-8265-6c2e38f653d8-manager-config\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.821943 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3ded07bd-2737-4d5d-8265-6c2e38f653d8-webhook-cert\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.822189 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtl7r\" 
(UniqueName: \"kubernetes.io/projected/3ded07bd-2737-4d5d-8265-6c2e38f653d8-kube-api-access-mtl7r\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.923224 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtl7r\" (UniqueName: \"kubernetes.io/projected/3ded07bd-2737-4d5d-8265-6c2e38f653d8-kube-api-access-mtl7r\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.923276 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3ded07bd-2737-4d5d-8265-6c2e38f653d8-apiservice-cert\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.923295 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3ded07bd-2737-4d5d-8265-6c2e38f653d8-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.923331 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/3ded07bd-2737-4d5d-8265-6c2e38f653d8-manager-config\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.923351 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3ded07bd-2737-4d5d-8265-6c2e38f653d8-webhook-cert\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.924265 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/3ded07bd-2737-4d5d-8265-6c2e38f653d8-manager-config\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.929372 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3ded07bd-2737-4d5d-8265-6c2e38f653d8-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.929729 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3ded07bd-2737-4d5d-8265-6c2e38f653d8-webhook-cert\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.942942 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3ded07bd-2737-4d5d-8265-6c2e38f653d8-apiservice-cert\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.944928 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtl7r\" (UniqueName: \"kubernetes.io/projected/3ded07bd-2737-4d5d-8265-6c2e38f653d8-kube-api-access-mtl7r\") pod \"loki-operator-controller-manager-7797b598cb-5xvfp\" (UID: \"3ded07bd-2737-4d5d-8265-6c2e38f653d8\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:27 crc kubenswrapper[4911]: I0929 21:35:27.996937 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:28 crc kubenswrapper[4911]: I0929 21:35:28.388081 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp"] Sep 29 21:35:28 crc kubenswrapper[4911]: W0929 21:35:28.393419 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ded07bd_2737_4d5d_8265_6c2e38f653d8.slice/crio-ff73a5f98cd522a41f754aea6d0a68da4263f6c5765acc96cb783ecf8fe006bd WatchSource:0}: Error finding container ff73a5f98cd522a41f754aea6d0a68da4263f6c5765acc96cb783ecf8fe006bd: Status 404 returned error can't find the container with id ff73a5f98cd522a41f754aea6d0a68da4263f6c5765acc96cb783ecf8fe006bd Sep 29 21:35:28 crc kubenswrapper[4911]: I0929 21:35:28.547573 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" event={"ID":"3ded07bd-2737-4d5d-8265-6c2e38f653d8","Type":"ContainerStarted","Data":"ff73a5f98cd522a41f754aea6d0a68da4263f6c5765acc96cb783ecf8fe006bd"} Sep 29 21:35:32 crc kubenswrapper[4911]: I0929 21:35:32.577091 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" event={"ID":"3ded07bd-2737-4d5d-8265-6c2e38f653d8","Type":"ContainerStarted","Data":"cb1f2c9085846db3362550b977bf5f183c2b2fc04c255e8dcc638cba4040601c"} Sep 29 21:35:38 crc kubenswrapper[4911]: I0929 21:35:38.629780 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" event={"ID":"3ded07bd-2737-4d5d-8265-6c2e38f653d8","Type":"ContainerStarted","Data":"515baf57719f9c843576fb917fd8e888ca5c39039c2397cdc93e8c0a53e16354"} Sep 29 21:35:38 crc kubenswrapper[4911]: I0929 21:35:38.630598 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:38 crc kubenswrapper[4911]: I0929 
21:35:38.633636 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" Sep 29 21:35:38 crc kubenswrapper[4911]: I0929 21:35:38.659520 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-7797b598cb-5xvfp" podStartSLOduration=2.27229609 podStartE2EDuration="11.659496547s" podCreationTimestamp="2025-09-29 21:35:27 +0000 UTC" firstStartedPulling="2025-09-29 21:35:28.396344848 +0000 UTC m=+606.373457519" lastFinishedPulling="2025-09-29 21:35:37.783545295 +0000 UTC m=+615.760657976" observedRunningTime="2025-09-29 21:35:38.656532763 +0000 UTC m=+616.633645474" watchObservedRunningTime="2025-09-29 21:35:38.659496547 +0000 UTC m=+616.636609248" Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.462726 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.463908 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.467186 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.467986 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.484162 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.655159 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vhvq\" (UniqueName: \"kubernetes.io/projected/4d24a9e3-ab5c-4a41-8942-1efac66a6d73-kube-api-access-8vhvq\") pod \"minio\" (UID: \"4d24a9e3-ab5c-4a41-8942-1efac66a6d73\") " pod="minio-dev/minio" Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.655391 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-dfc2ea90-cbe0-4d07-9aa7-5fbcd3783a46\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dfc2ea90-cbe0-4d07-9aa7-5fbcd3783a46\") pod \"minio\" (UID: \"4d24a9e3-ab5c-4a41-8942-1efac66a6d73\") " pod="minio-dev/minio" Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.756846 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-dfc2ea90-cbe0-4d07-9aa7-5fbcd3783a46\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dfc2ea90-cbe0-4d07-9aa7-5fbcd3783a46\") pod \"minio\" (UID: \"4d24a9e3-ab5c-4a41-8942-1efac66a6d73\") " pod="minio-dev/minio" Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.757627 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vhvq\" (UniqueName: \"kubernetes.io/projected/4d24a9e3-ab5c-4a41-8942-1efac66a6d73-kube-api-access-8vhvq\") pod \"minio\" (UID: \"4d24a9e3-ab5c-4a41-8942-1efac66a6d73\") " pod="minio-dev/minio" Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.762357 4911 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
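
The pod_startup_latency_tracker entries scattered through this log encode one relationship: podStartSLOduration is podStartE2EDuration minus the image-pull window, so pull time does not count against the startup SLO. For loki-operator-controller-manager above, 11.659496547s end-to-end minus the 9.387200447s between firstStartedPulling and lastFinishedPulling leaves ~2.272296s, matching the logged podStartSLOduration=2.27229609 to within clock rounding (pods with zero-valued pull timestamps, like ovnkube-node earlier, never pulled, so SLO equals E2E). A sketch of that arithmetic; field names are assumed, and the real tracker is kubelet's pod_startup_latency_tracker.go:

```go
package main

import (
	"fmt"
	"time"
)

// startupSLO mirrors the relationship visible in the kubelet's
// pod_startup_latency_tracker entries: SLO duration = end-to-end startup
// time minus the image-pull window. Illustrative only.
func startupSLO(created, firstPull, lastPull, running time.Time) (slo, e2e time.Duration) {
	e2e = running.Sub(created)
	slo = e2e - lastPull.Sub(firstPull)
	return slo, e2e
}

func main() {
	// Timestamps from the loki-operator-controller-manager-7797b598cb-5xvfp entry.
	created, _ := time.Parse(time.RFC3339Nano, "2025-09-29T21:35:27Z")
	firstPull, _ := time.Parse(time.RFC3339Nano, "2025-09-29T21:35:28.396344848Z")
	lastPull, _ := time.Parse(time.RFC3339Nano, "2025-09-29T21:35:37.783545295Z")
	running, _ := time.Parse(time.RFC3339Nano, "2025-09-29T21:35:38.659496547Z")

	slo, e2e := startupSLO(created, firstPull, lastPull, running)
	// Prints ~2.2722961s and 11.659496547s, matching the logged
	// podStartSLOduration and podStartE2EDuration modulo ns rounding.
	fmt.Printf("podStartSLOduration=%v podStartE2EDuration=%v\n", slo, e2e)
}
```
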
Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.762415 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-dfc2ea90-cbe0-4d07-9aa7-5fbcd3783a46\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dfc2ea90-cbe0-4d07-9aa7-5fbcd3783a46\") pod \"minio\" (UID: \"4d24a9e3-ab5c-4a41-8942-1efac66a6d73\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/fcce6c4ccf47628e16d28fa0bfc680bc817feff108a05d751de7fe1dcc47c3d9/globalmount\"" pod="minio-dev/minio" Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.793330 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-dfc2ea90-cbe0-4d07-9aa7-5fbcd3783a46\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dfc2ea90-cbe0-4d07-9aa7-5fbcd3783a46\") pod \"minio\" (UID: \"4d24a9e3-ab5c-4a41-8942-1efac66a6d73\") " pod="minio-dev/minio" Sep 29 21:35:43 crc kubenswrapper[4911]: I0929 21:35:43.799970 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vhvq\" (UniqueName: \"kubernetes.io/projected/4d24a9e3-ab5c-4a41-8942-1efac66a6d73-kube-api-access-8vhvq\") pod \"minio\" (UID: \"4d24a9e3-ab5c-4a41-8942-1efac66a6d73\") " pod="minio-dev/minio" Sep 29 21:35:44 crc kubenswrapper[4911]: I0929 21:35:44.096032 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Sep 29 21:35:44 crc kubenswrapper[4911]: I0929 21:35:44.460763 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Sep 29 21:35:44 crc kubenswrapper[4911]: I0929 21:35:44.665033 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"4d24a9e3-ab5c-4a41-8942-1efac66a6d73","Type":"ContainerStarted","Data":"e5e33001408c4dfe54f0cb6452844f61948bab146754d7a7cd7d5e6d14d879ee"} Sep 29 21:35:48 crc kubenswrapper[4911]: I0929 21:35:48.729583 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=4.302499173 podStartE2EDuration="7.729563859s" podCreationTimestamp="2025-09-29 21:35:41 +0000 UTC" firstStartedPulling="2025-09-29 21:35:44.467380192 +0000 UTC m=+622.444492863" lastFinishedPulling="2025-09-29 21:35:47.894444878 +0000 UTC m=+625.871557549" observedRunningTime="2025-09-29 21:35:48.729557119 +0000 UTC m=+626.706669800" watchObservedRunningTime="2025-09-29 21:35:48.729563859 +0000 UTC m=+626.706676540" Sep 29 21:35:48 crc kubenswrapper[4911]: I0929 21:35:48.734523 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"4d24a9e3-ab5c-4a41-8942-1efac66a6d73","Type":"ContainerStarted","Data":"aa45885493213993c643e39f966e3f5e78ab76d8b202d2f0cdfd5eb281060879"} Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.654161 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-7kzkd"] Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.656411 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.660076 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.660359 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-57mzw" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.660379 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.660889 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.660975 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.676156 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.696490 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-7kzkd"] Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.831221 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/c102400c-fbea-45f2-a201-f7bae363f052-tmp\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.831272 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c102400c-fbea-45f2-a201-f7bae363f052-trusted-ca\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.831304 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/c102400c-fbea-45f2-a201-f7bae363f052-metrics\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.831333 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/c102400c-fbea-45f2-a201-f7bae363f052-collector-syslog-receiver\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.831361 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/c102400c-fbea-45f2-a201-f7bae363f052-sa-token\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.831464 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlnnp\" (UniqueName: \"kubernetes.io/projected/c102400c-fbea-45f2-a201-f7bae363f052-kube-api-access-zlnnp\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " 
pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.831500 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/c102400c-fbea-45f2-a201-f7bae363f052-config-openshift-service-cacrt\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.831529 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/c102400c-fbea-45f2-a201-f7bae363f052-datadir\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.831559 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/c102400c-fbea-45f2-a201-f7bae363f052-entrypoint\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.831586 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c102400c-fbea-45f2-a201-f7bae363f052-config\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.831611 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/c102400c-fbea-45f2-a201-f7bae363f052-collector-token\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.933376 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlnnp\" (UniqueName: \"kubernetes.io/projected/c102400c-fbea-45f2-a201-f7bae363f052-kube-api-access-zlnnp\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.933580 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/c102400c-fbea-45f2-a201-f7bae363f052-config-openshift-service-cacrt\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.933643 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/c102400c-fbea-45f2-a201-f7bae363f052-datadir\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.933723 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/c102400c-fbea-45f2-a201-f7bae363f052-entrypoint\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.933787 
4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c102400c-fbea-45f2-a201-f7bae363f052-config\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.933860 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/c102400c-fbea-45f2-a201-f7bae363f052-collector-token\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.933980 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/c102400c-fbea-45f2-a201-f7bae363f052-tmp\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.934012 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c102400c-fbea-45f2-a201-f7bae363f052-trusted-ca\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.934064 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/c102400c-fbea-45f2-a201-f7bae363f052-metrics\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.934115 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/c102400c-fbea-45f2-a201-f7bae363f052-collector-syslog-receiver\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.934151 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/c102400c-fbea-45f2-a201-f7bae363f052-sa-token\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.934556 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/c102400c-fbea-45f2-a201-f7bae363f052-config-openshift-service-cacrt\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.934842 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/c102400c-fbea-45f2-a201-f7bae363f052-entrypoint\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.935459 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c102400c-fbea-45f2-a201-f7bae363f052-trusted-ca\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " 
pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: E0929 21:35:55.935550 4911 secret.go:188] Couldn't get secret openshift-logging/collector-syslog-receiver: secret "collector-syslog-receiver" not found Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.935578 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/c102400c-fbea-45f2-a201-f7bae363f052-datadir\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: E0929 21:35:55.935599 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c102400c-fbea-45f2-a201-f7bae363f052-collector-syslog-receiver podName:c102400c-fbea-45f2-a201-f7bae363f052 nodeName:}" failed. No retries permitted until 2025-09-29 21:35:56.435581103 +0000 UTC m=+634.412693784 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "collector-syslog-receiver" (UniqueName: "kubernetes.io/secret/c102400c-fbea-45f2-a201-f7bae363f052-collector-syslog-receiver") pod "collector-7kzkd" (UID: "c102400c-fbea-45f2-a201-f7bae363f052") : secret "collector-syslog-receiver" not found Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.935725 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c102400c-fbea-45f2-a201-f7bae363f052-config\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.941626 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/c102400c-fbea-45f2-a201-f7bae363f052-collector-token\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.942135 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/c102400c-fbea-45f2-a201-f7bae363f052-tmp\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.945396 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/c102400c-fbea-45f2-a201-f7bae363f052-metrics\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.959603 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/c102400c-fbea-45f2-a201-f7bae363f052-sa-token\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:55 crc kubenswrapper[4911]: I0929 21:35:55.965818 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlnnp\" (UniqueName: \"kubernetes.io/projected/c102400c-fbea-45f2-a201-f7bae363f052-kube-api-access-zlnnp\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:56 crc kubenswrapper[4911]: I0929 21:35:56.443479 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" 
(UniqueName: \"kubernetes.io/secret/c102400c-fbea-45f2-a201-f7bae363f052-collector-syslog-receiver\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:56 crc kubenswrapper[4911]: I0929 21:35:56.449288 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/c102400c-fbea-45f2-a201-f7bae363f052-collector-syslog-receiver\") pod \"collector-7kzkd\" (UID: \"c102400c-fbea-45f2-a201-f7bae363f052\") " pod="openshift-logging/collector-7kzkd" Sep 29 21:35:56 crc kubenswrapper[4911]: I0929 21:35:56.590440 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-7kzkd" Sep 29 21:35:56 crc kubenswrapper[4911]: I0929 21:35:56.890361 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-7kzkd"] Sep 29 21:35:57 crc kubenswrapper[4911]: I0929 21:35:57.775341 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-7kzkd" event={"ID":"c102400c-fbea-45f2-a201-f7bae363f052","Type":"ContainerStarted","Data":"8ef2ee61abe51027d49e150a99facd761f12fe24f296fdbc844b0779001c313d"} Sep 29 21:36:03 crc kubenswrapper[4911]: I0929 21:36:03.821560 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-7kzkd" event={"ID":"c102400c-fbea-45f2-a201-f7bae363f052","Type":"ContainerStarted","Data":"26086a44a9ded3a747d895f69985c61abcb3fab21ca8bdc8db0dd75f02c5c88e"} Sep 29 21:36:03 crc kubenswrapper[4911]: I0929 21:36:03.858233 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/collector-7kzkd" podStartSLOduration=2.482861791 podStartE2EDuration="8.858202395s" podCreationTimestamp="2025-09-29 21:35:55 +0000 UTC" firstStartedPulling="2025-09-29 21:35:56.908239884 +0000 UTC m=+634.885352565" lastFinishedPulling="2025-09-29 21:36:03.283580498 +0000 UTC m=+641.260693169" observedRunningTime="2025-09-29 21:36:03.855524679 +0000 UTC m=+641.832637430" watchObservedRunningTime="2025-09-29 21:36:03.858202395 +0000 UTC m=+641.835315146" Sep 29 21:36:12 crc kubenswrapper[4911]: I0929 21:36:12.940979 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr"] Sep 29 21:36:12 crc kubenswrapper[4911]: I0929 21:36:12.952514 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:12 crc kubenswrapper[4911]: I0929 21:36:12.957117 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 21:36:12 crc kubenswrapper[4911]: I0929 21:36:12.971151 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr"] Sep 29 21:36:12 crc kubenswrapper[4911]: I0929 21:36:12.996671 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlwwp\" (UniqueName: \"kubernetes.io/projected/629693d2-2856-4eb7-9abc-dbee25234329-kube-api-access-jlwwp\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr\" (UID: \"629693d2-2856-4eb7-9abc-dbee25234329\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:12 crc kubenswrapper[4911]: I0929 21:36:12.997055 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/629693d2-2856-4eb7-9abc-dbee25234329-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr\" (UID: \"629693d2-2856-4eb7-9abc-dbee25234329\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:12 crc kubenswrapper[4911]: I0929 21:36:12.997174 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/629693d2-2856-4eb7-9abc-dbee25234329-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr\" (UID: \"629693d2-2856-4eb7-9abc-dbee25234329\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:13 crc kubenswrapper[4911]: I0929 21:36:13.098066 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlwwp\" (UniqueName: \"kubernetes.io/projected/629693d2-2856-4eb7-9abc-dbee25234329-kube-api-access-jlwwp\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr\" (UID: \"629693d2-2856-4eb7-9abc-dbee25234329\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:13 crc kubenswrapper[4911]: I0929 21:36:13.098204 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/629693d2-2856-4eb7-9abc-dbee25234329-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr\" (UID: \"629693d2-2856-4eb7-9abc-dbee25234329\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:13 crc kubenswrapper[4911]: I0929 21:36:13.098266 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/629693d2-2856-4eb7-9abc-dbee25234329-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr\" (UID: \"629693d2-2856-4eb7-9abc-dbee25234329\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:13 crc kubenswrapper[4911]: I0929 21:36:13.099142 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/629693d2-2856-4eb7-9abc-dbee25234329-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr\" (UID: \"629693d2-2856-4eb7-9abc-dbee25234329\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:13 crc kubenswrapper[4911]: I0929 21:36:13.099609 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/629693d2-2856-4eb7-9abc-dbee25234329-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr\" (UID: \"629693d2-2856-4eb7-9abc-dbee25234329\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:13 crc kubenswrapper[4911]: I0929 21:36:13.124212 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlwwp\" (UniqueName: \"kubernetes.io/projected/629693d2-2856-4eb7-9abc-dbee25234329-kube-api-access-jlwwp\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr\" (UID: \"629693d2-2856-4eb7-9abc-dbee25234329\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:13 crc kubenswrapper[4911]: I0929 21:36:13.278469 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:13 crc kubenswrapper[4911]: I0929 21:36:13.550181 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr"] Sep 29 21:36:13 crc kubenswrapper[4911]: I0929 21:36:13.897996 4911 generic.go:334] "Generic (PLEG): container finished" podID="629693d2-2856-4eb7-9abc-dbee25234329" containerID="de3d7133a1d5794105c178f40e5b80b8c489344b688af72cd8fedf95fccbcd35" exitCode=0 Sep 29 21:36:13 crc kubenswrapper[4911]: I0929 21:36:13.898069 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" event={"ID":"629693d2-2856-4eb7-9abc-dbee25234329","Type":"ContainerDied","Data":"de3d7133a1d5794105c178f40e5b80b8c489344b688af72cd8fedf95fccbcd35"} Sep 29 21:36:13 crc kubenswrapper[4911]: I0929 21:36:13.898409 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" event={"ID":"629693d2-2856-4eb7-9abc-dbee25234329","Type":"ContainerStarted","Data":"1c12ae9dd462a1e350b66d401214688b6e09b45519b908043eae7876acaf0fbf"} Sep 29 21:36:15 crc kubenswrapper[4911]: I0929 21:36:15.915524 4911 generic.go:334] "Generic (PLEG): container finished" podID="629693d2-2856-4eb7-9abc-dbee25234329" containerID="ae86fc796e9921adc0b82e5c9c5e93ccb00ab8df1b11c091d6ab0532253ffa90" exitCode=0 Sep 29 21:36:15 crc kubenswrapper[4911]: I0929 21:36:15.915917 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" event={"ID":"629693d2-2856-4eb7-9abc-dbee25234329","Type":"ContainerDied","Data":"ae86fc796e9921adc0b82e5c9c5e93ccb00ab8df1b11c091d6ab0532253ffa90"} Sep 29 21:36:16 crc kubenswrapper[4911]: I0929 21:36:16.924174 4911 generic.go:334] "Generic (PLEG): container finished" podID="629693d2-2856-4eb7-9abc-dbee25234329" containerID="07c9bd4519db251123742b6f2d83dcafab9c4e1a3f04553e645ecd9f6368d438" exitCode=0 Sep 29 21:36:16 crc kubenswrapper[4911]: I0929 
21:36:16.924236 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" event={"ID":"629693d2-2856-4eb7-9abc-dbee25234329","Type":"ContainerDied","Data":"07c9bd4519db251123742b6f2d83dcafab9c4e1a3f04553e645ecd9f6368d438"} Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.272913 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.374540 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/629693d2-2856-4eb7-9abc-dbee25234329-util\") pod \"629693d2-2856-4eb7-9abc-dbee25234329\" (UID: \"629693d2-2856-4eb7-9abc-dbee25234329\") " Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.374734 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/629693d2-2856-4eb7-9abc-dbee25234329-bundle\") pod \"629693d2-2856-4eb7-9abc-dbee25234329\" (UID: \"629693d2-2856-4eb7-9abc-dbee25234329\") " Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.374785 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlwwp\" (UniqueName: \"kubernetes.io/projected/629693d2-2856-4eb7-9abc-dbee25234329-kube-api-access-jlwwp\") pod \"629693d2-2856-4eb7-9abc-dbee25234329\" (UID: \"629693d2-2856-4eb7-9abc-dbee25234329\") " Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.376073 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/629693d2-2856-4eb7-9abc-dbee25234329-bundle" (OuterVolumeSpecName: "bundle") pod "629693d2-2856-4eb7-9abc-dbee25234329" (UID: "629693d2-2856-4eb7-9abc-dbee25234329"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.384194 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/629693d2-2856-4eb7-9abc-dbee25234329-kube-api-access-jlwwp" (OuterVolumeSpecName: "kube-api-access-jlwwp") pod "629693d2-2856-4eb7-9abc-dbee25234329" (UID: "629693d2-2856-4eb7-9abc-dbee25234329"). InnerVolumeSpecName "kube-api-access-jlwwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.410320 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/629693d2-2856-4eb7-9abc-dbee25234329-util" (OuterVolumeSpecName: "util") pod "629693d2-2856-4eb7-9abc-dbee25234329" (UID: "629693d2-2856-4eb7-9abc-dbee25234329"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.476683 4911 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/629693d2-2856-4eb7-9abc-dbee25234329-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.476743 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlwwp\" (UniqueName: \"kubernetes.io/projected/629693d2-2856-4eb7-9abc-dbee25234329-kube-api-access-jlwwp\") on node \"crc\" DevicePath \"\"" Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.476766 4911 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/629693d2-2856-4eb7-9abc-dbee25234329-util\") on node \"crc\" DevicePath \"\"" Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.943649 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" event={"ID":"629693d2-2856-4eb7-9abc-dbee25234329","Type":"ContainerDied","Data":"1c12ae9dd462a1e350b66d401214688b6e09b45519b908043eae7876acaf0fbf"} Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.943692 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c12ae9dd462a1e350b66d401214688b6e09b45519b908043eae7876acaf0fbf" Sep 29 21:36:18 crc kubenswrapper[4911]: I0929 21:36:18.943820 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.413111 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-rzfn8"] Sep 29 21:36:21 crc kubenswrapper[4911]: E0929 21:36:21.413726 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="629693d2-2856-4eb7-9abc-dbee25234329" containerName="extract" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.413746 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="629693d2-2856-4eb7-9abc-dbee25234329" containerName="extract" Sep 29 21:36:21 crc kubenswrapper[4911]: E0929 21:36:21.413775 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="629693d2-2856-4eb7-9abc-dbee25234329" containerName="util" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.413787 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="629693d2-2856-4eb7-9abc-dbee25234329" containerName="util" Sep 29 21:36:21 crc kubenswrapper[4911]: E0929 21:36:21.413842 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="629693d2-2856-4eb7-9abc-dbee25234329" containerName="pull" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.413857 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="629693d2-2856-4eb7-9abc-dbee25234329" containerName="pull" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.414036 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="629693d2-2856-4eb7-9abc-dbee25234329" containerName="extract" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.414673 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rzfn8" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.416731 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-qg5kt" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.417427 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.417483 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.423427 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-rzfn8"] Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.519957 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d76ns\" (UniqueName: \"kubernetes.io/projected/46d5f12f-493e-4199-a9ce-649275408eff-kube-api-access-d76ns\") pod \"nmstate-operator-5d6f6cfd66-rzfn8\" (UID: \"46d5f12f-493e-4199-a9ce-649275408eff\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rzfn8" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.622166 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d76ns\" (UniqueName: \"kubernetes.io/projected/46d5f12f-493e-4199-a9ce-649275408eff-kube-api-access-d76ns\") pod \"nmstate-operator-5d6f6cfd66-rzfn8\" (UID: \"46d5f12f-493e-4199-a9ce-649275408eff\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rzfn8" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.647013 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d76ns\" (UniqueName: \"kubernetes.io/projected/46d5f12f-493e-4199-a9ce-649275408eff-kube-api-access-d76ns\") pod \"nmstate-operator-5d6f6cfd66-rzfn8\" (UID: \"46d5f12f-493e-4199-a9ce-649275408eff\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rzfn8" Sep 29 21:36:21 crc kubenswrapper[4911]: I0929 21:36:21.731047 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rzfn8" Sep 29 21:36:22 crc kubenswrapper[4911]: I0929 21:36:22.044769 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-rzfn8"] Sep 29 21:36:22 crc kubenswrapper[4911]: W0929 21:36:22.050804 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46d5f12f_493e_4199_a9ce_649275408eff.slice/crio-994598f77f7f192f084db7a1a77e5f1e8f3eb6598cbe990108f2873239d38b65 WatchSource:0}: Error finding container 994598f77f7f192f084db7a1a77e5f1e8f3eb6598cbe990108f2873239d38b65: Status 404 returned error can't find the container with id 994598f77f7f192f084db7a1a77e5f1e8f3eb6598cbe990108f2873239d38b65 Sep 29 21:36:22 crc kubenswrapper[4911]: I0929 21:36:22.977403 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rzfn8" event={"ID":"46d5f12f-493e-4199-a9ce-649275408eff","Type":"ContainerStarted","Data":"994598f77f7f192f084db7a1a77e5f1e8f3eb6598cbe990108f2873239d38b65"} Sep 29 21:36:25 crc kubenswrapper[4911]: I0929 21:36:25.006268 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rzfn8" event={"ID":"46d5f12f-493e-4199-a9ce-649275408eff","Type":"ContainerStarted","Data":"32b9e086d112e2452fef4e4b89a4ab45cbbe5ed677a130aab9e8efc4c3a99b89"} Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.049014 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-rzfn8" podStartSLOduration=3.2784242 podStartE2EDuration="5.048994943s" podCreationTimestamp="2025-09-29 21:36:21 +0000 UTC" firstStartedPulling="2025-09-29 21:36:22.053336388 +0000 UTC m=+660.030449059" lastFinishedPulling="2025-09-29 21:36:23.823907101 +0000 UTC m=+661.801019802" observedRunningTime="2025-09-29 21:36:25.024267942 +0000 UTC m=+663.001380653" watchObservedRunningTime="2025-09-29 21:36:26.048994943 +0000 UTC m=+664.026107614" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.050586 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-jmsmg"] Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.051497 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-jmsmg" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.054208 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-7rxpz" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.061835 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-jmsmg"] Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.065269 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7"] Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.066552 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.068022 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.079396 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7"] Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.119026 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-m57sm"] Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.119716 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.183780 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/734625b7-c050-470e-92b7-6a4ab5de695e-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-qdgf7\" (UID: \"734625b7-c050-470e-92b7-6a4ab5de695e\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.183911 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnv7r\" (UniqueName: \"kubernetes.io/projected/734625b7-c050-470e-92b7-6a4ab5de695e-kube-api-access-vnv7r\") pod \"nmstate-webhook-6d689559c5-qdgf7\" (UID: \"734625b7-c050-470e-92b7-6a4ab5de695e\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.183933 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgzc2\" (UniqueName: \"kubernetes.io/projected/0ae2d175-d9c4-4a66-9747-fd82fef23890-kube-api-access-vgzc2\") pod \"nmstate-metrics-58fcddf996-jmsmg\" (UID: \"0ae2d175-d9c4-4a66-9747-fd82fef23890\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-jmsmg" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.194626 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt"] Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.195417 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.198000 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-z8nnc" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.198204 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.198369 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.202664 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt"] Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.285243 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/85f29bf7-4f84-4317-ac7e-b8724401c99f-nmstate-lock\") pod \"nmstate-handler-m57sm\" (UID: \"85f29bf7-4f84-4317-ac7e-b8724401c99f\") " pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.285312 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/734625b7-c050-470e-92b7-6a4ab5de695e-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-qdgf7\" (UID: \"734625b7-c050-470e-92b7-6a4ab5de695e\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.285364 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/85f29bf7-4f84-4317-ac7e-b8724401c99f-dbus-socket\") pod \"nmstate-handler-m57sm\" (UID: \"85f29bf7-4f84-4317-ac7e-b8724401c99f\") " pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.285425 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/85f29bf7-4f84-4317-ac7e-b8724401c99f-ovs-socket\") pod \"nmstate-handler-m57sm\" (UID: \"85f29bf7-4f84-4317-ac7e-b8724401c99f\") " pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.285471 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgbm4\" (UniqueName: \"kubernetes.io/projected/85f29bf7-4f84-4317-ac7e-b8724401c99f-kube-api-access-mgbm4\") pod \"nmstate-handler-m57sm\" (UID: \"85f29bf7-4f84-4317-ac7e-b8724401c99f\") " pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.285514 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnv7r\" (UniqueName: \"kubernetes.io/projected/734625b7-c050-470e-92b7-6a4ab5de695e-kube-api-access-vnv7r\") pod \"nmstate-webhook-6d689559c5-qdgf7\" (UID: \"734625b7-c050-470e-92b7-6a4ab5de695e\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.285544 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgzc2\" (UniqueName: \"kubernetes.io/projected/0ae2d175-d9c4-4a66-9747-fd82fef23890-kube-api-access-vgzc2\") pod \"nmstate-metrics-58fcddf996-jmsmg\" (UID: 
\"0ae2d175-d9c4-4a66-9747-fd82fef23890\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-jmsmg" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.294354 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/734625b7-c050-470e-92b7-6a4ab5de695e-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-qdgf7\" (UID: \"734625b7-c050-470e-92b7-6a4ab5de695e\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.310382 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnv7r\" (UniqueName: \"kubernetes.io/projected/734625b7-c050-470e-92b7-6a4ab5de695e-kube-api-access-vnv7r\") pod \"nmstate-webhook-6d689559c5-qdgf7\" (UID: \"734625b7-c050-470e-92b7-6a4ab5de695e\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.314467 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgzc2\" (UniqueName: \"kubernetes.io/projected/0ae2d175-d9c4-4a66-9747-fd82fef23890-kube-api-access-vgzc2\") pod \"nmstate-metrics-58fcddf996-jmsmg\" (UID: \"0ae2d175-d9c4-4a66-9747-fd82fef23890\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-jmsmg" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.375871 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-jmsmg" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.379995 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-54d6d488b5-z2mz8"] Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.380701 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.387562 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/85f29bf7-4f84-4317-ac7e-b8724401c99f-nmstate-lock\") pod \"nmstate-handler-m57sm\" (UID: \"85f29bf7-4f84-4317-ac7e-b8724401c99f\") " pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.387607 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/85f29bf7-4f84-4317-ac7e-b8724401c99f-dbus-socket\") pod \"nmstate-handler-m57sm\" (UID: \"85f29bf7-4f84-4317-ac7e-b8724401c99f\") " pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.387634 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/603ee12c-cc36-4dd0-af9b-efa00c50712b-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-rqglt\" (UID: \"603ee12c-cc36-4dd0-af9b-efa00c50712b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.387667 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/85f29bf7-4f84-4317-ac7e-b8724401c99f-ovs-socket\") pod \"nmstate-handler-m57sm\" (UID: \"85f29bf7-4f84-4317-ac7e-b8724401c99f\") " pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.387692 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4fxb\" (UniqueName: \"kubernetes.io/projected/603ee12c-cc36-4dd0-af9b-efa00c50712b-kube-api-access-j4fxb\") pod \"nmstate-console-plugin-864bb6dfb5-rqglt\" (UID: \"603ee12c-cc36-4dd0-af9b-efa00c50712b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.387713 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgbm4\" (UniqueName: \"kubernetes.io/projected/85f29bf7-4f84-4317-ac7e-b8724401c99f-kube-api-access-mgbm4\") pod \"nmstate-handler-m57sm\" (UID: \"85f29bf7-4f84-4317-ac7e-b8724401c99f\") " pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.387743 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/603ee12c-cc36-4dd0-af9b-efa00c50712b-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-rqglt\" (UID: \"603ee12c-cc36-4dd0-af9b-efa00c50712b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.387844 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/85f29bf7-4f84-4317-ac7e-b8724401c99f-ovs-socket\") pod \"nmstate-handler-m57sm\" (UID: \"85f29bf7-4f84-4317-ac7e-b8724401c99f\") " pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.387897 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/85f29bf7-4f84-4317-ac7e-b8724401c99f-nmstate-lock\") pod 
\"nmstate-handler-m57sm\" (UID: \"85f29bf7-4f84-4317-ac7e-b8724401c99f\") " pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.387938 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.388050 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/85f29bf7-4f84-4317-ac7e-b8724401c99f-dbus-socket\") pod \"nmstate-handler-m57sm\" (UID: \"85f29bf7-4f84-4317-ac7e-b8724401c99f\") " pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.419846 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-54d6d488b5-z2mz8"] Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.428011 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgbm4\" (UniqueName: \"kubernetes.io/projected/85f29bf7-4f84-4317-ac7e-b8724401c99f-kube-api-access-mgbm4\") pod \"nmstate-handler-m57sm\" (UID: \"85f29bf7-4f84-4317-ac7e-b8724401c99f\") " pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.437220 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.490083 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c0924427-9e3e-424b-aece-f1841545e16f-oauth-serving-cert\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.490125 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c0924427-9e3e-424b-aece-f1841545e16f-console-oauth-config\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.490153 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c0924427-9e3e-424b-aece-f1841545e16f-trusted-ca-bundle\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.490175 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/603ee12c-cc36-4dd0-af9b-efa00c50712b-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-rqglt\" (UID: \"603ee12c-cc36-4dd0-af9b-efa00c50712b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.490193 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c0924427-9e3e-424b-aece-f1841545e16f-console-config\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: 
I0929 21:36:26.490228 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4fxb\" (UniqueName: \"kubernetes.io/projected/603ee12c-cc36-4dd0-af9b-efa00c50712b-kube-api-access-j4fxb\") pod \"nmstate-console-plugin-864bb6dfb5-rqglt\" (UID: \"603ee12c-cc36-4dd0-af9b-efa00c50712b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.490259 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/603ee12c-cc36-4dd0-af9b-efa00c50712b-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-rqglt\" (UID: \"603ee12c-cc36-4dd0-af9b-efa00c50712b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.490278 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c0924427-9e3e-424b-aece-f1841545e16f-service-ca\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.490292 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24l4x\" (UniqueName: \"kubernetes.io/projected/c0924427-9e3e-424b-aece-f1841545e16f-kube-api-access-24l4x\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.490313 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c0924427-9e3e-424b-aece-f1841545e16f-console-serving-cert\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: E0929 21:36:26.490664 4911 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Sep 29 21:36:26 crc kubenswrapper[4911]: E0929 21:36:26.490715 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/603ee12c-cc36-4dd0-af9b-efa00c50712b-plugin-serving-cert podName:603ee12c-cc36-4dd0-af9b-efa00c50712b nodeName:}" failed. No retries permitted until 2025-09-29 21:36:26.990696011 +0000 UTC m=+664.967808682 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/603ee12c-cc36-4dd0-af9b-efa00c50712b-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-rqglt" (UID: "603ee12c-cc36-4dd0-af9b-efa00c50712b") : secret "plugin-serving-cert" not found Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.492044 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/603ee12c-cc36-4dd0-af9b-efa00c50712b-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-rqglt\" (UID: \"603ee12c-cc36-4dd0-af9b-efa00c50712b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.508756 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4fxb\" (UniqueName: \"kubernetes.io/projected/603ee12c-cc36-4dd0-af9b-efa00c50712b-kube-api-access-j4fxb\") pod \"nmstate-console-plugin-864bb6dfb5-rqglt\" (UID: \"603ee12c-cc36-4dd0-af9b-efa00c50712b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.591064 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c0924427-9e3e-424b-aece-f1841545e16f-oauth-serving-cert\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.591112 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c0924427-9e3e-424b-aece-f1841545e16f-console-oauth-config\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.591142 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c0924427-9e3e-424b-aece-f1841545e16f-trusted-ca-bundle\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.591175 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c0924427-9e3e-424b-aece-f1841545e16f-console-config\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.591227 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c0924427-9e3e-424b-aece-f1841545e16f-service-ca\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.591243 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24l4x\" (UniqueName: \"kubernetes.io/projected/c0924427-9e3e-424b-aece-f1841545e16f-kube-api-access-24l4x\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.591265 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c0924427-9e3e-424b-aece-f1841545e16f-console-serving-cert\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.592232 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c0924427-9e3e-424b-aece-f1841545e16f-console-config\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.592641 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c0924427-9e3e-424b-aece-f1841545e16f-service-ca\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.593884 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c0924427-9e3e-424b-aece-f1841545e16f-trusted-ca-bundle\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.595500 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c0924427-9e3e-424b-aece-f1841545e16f-oauth-serving-cert\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.598475 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c0924427-9e3e-424b-aece-f1841545e16f-console-oauth-config\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.598680 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c0924427-9e3e-424b-aece-f1841545e16f-console-serving-cert\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.601305 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-jmsmg"] Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.611256 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24l4x\" (UniqueName: \"kubernetes.io/projected/c0924427-9e3e-424b-aece-f1841545e16f-kube-api-access-24l4x\") pod \"console-54d6d488b5-z2mz8\" (UID: \"c0924427-9e3e-424b-aece-f1841545e16f\") " pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.651479 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7"] Sep 29 21:36:26 crc kubenswrapper[4911]: W0929 21:36:26.652078 4911 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod734625b7_c050_470e_92b7_6a4ab5de695e.slice/crio-2937e562ad5f2de38001a6483f792baa8c82339e7d231fdb9d38e40c0de3d96f WatchSource:0}: Error finding container 2937e562ad5f2de38001a6483f792baa8c82339e7d231fdb9d38e40c0de3d96f: Status 404 returned error can't find the container with id 2937e562ad5f2de38001a6483f792baa8c82339e7d231fdb9d38e40c0de3d96f Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.742441 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.988548 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-54d6d488b5-z2mz8"] Sep 29 21:36:26 crc kubenswrapper[4911]: W0929 21:36:26.992006 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0924427_9e3e_424b_aece_f1841545e16f.slice/crio-555599be6119b1c3f8657768a472d458ced1f96457ae1a2297cadf17c5cc29e9 WatchSource:0}: Error finding container 555599be6119b1c3f8657768a472d458ced1f96457ae1a2297cadf17c5cc29e9: Status 404 returned error can't find the container with id 555599be6119b1c3f8657768a472d458ced1f96457ae1a2297cadf17c5cc29e9 Sep 29 21:36:26 crc kubenswrapper[4911]: I0929 21:36:26.995568 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/603ee12c-cc36-4dd0-af9b-efa00c50712b-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-rqglt\" (UID: \"603ee12c-cc36-4dd0-af9b-efa00c50712b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" Sep 29 21:36:27 crc kubenswrapper[4911]: I0929 21:36:27.002996 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/603ee12c-cc36-4dd0-af9b-efa00c50712b-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-rqglt\" (UID: \"603ee12c-cc36-4dd0-af9b-efa00c50712b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" Sep 29 21:36:27 crc kubenswrapper[4911]: I0929 21:36:27.026594 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" event={"ID":"734625b7-c050-470e-92b7-6a4ab5de695e","Type":"ContainerStarted","Data":"2937e562ad5f2de38001a6483f792baa8c82339e7d231fdb9d38e40c0de3d96f"} Sep 29 21:36:27 crc kubenswrapper[4911]: I0929 21:36:27.028303 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-54d6d488b5-z2mz8" event={"ID":"c0924427-9e3e-424b-aece-f1841545e16f","Type":"ContainerStarted","Data":"555599be6119b1c3f8657768a472d458ced1f96457ae1a2297cadf17c5cc29e9"} Sep 29 21:36:27 crc kubenswrapper[4911]: I0929 21:36:27.029643 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-jmsmg" event={"ID":"0ae2d175-d9c4-4a66-9747-fd82fef23890","Type":"ContainerStarted","Data":"0aab4724e35d97b6911c4911ce933184753e4520f80a5873932af2ab218b32ce"} Sep 29 21:36:27 crc kubenswrapper[4911]: I0929 21:36:27.031465 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-m57sm" event={"ID":"85f29bf7-4f84-4317-ac7e-b8724401c99f","Type":"ContainerStarted","Data":"877316e18f41433c358a4d8aeb63fc2052f4535fa7931060fefc5d77e7d286f3"} Sep 29 21:36:27 crc kubenswrapper[4911]: I0929 21:36:27.107491 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" Sep 29 21:36:27 crc kubenswrapper[4911]: I0929 21:36:27.379038 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt"] Sep 29 21:36:28 crc kubenswrapper[4911]: I0929 21:36:28.038179 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-54d6d488b5-z2mz8" event={"ID":"c0924427-9e3e-424b-aece-f1841545e16f","Type":"ContainerStarted","Data":"eeffccb82859cff6a95e85b153bc841ad73f8edf1cc3ed1ea312614fba4123c2"} Sep 29 21:36:28 crc kubenswrapper[4911]: I0929 21:36:28.040783 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" event={"ID":"603ee12c-cc36-4dd0-af9b-efa00c50712b","Type":"ContainerStarted","Data":"f0576796e68b387b761946028330f08bb479808098d27464fa360e05babcdad3"} Sep 29 21:36:28 crc kubenswrapper[4911]: I0929 21:36:28.060235 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-54d6d488b5-z2mz8" podStartSLOduration=2.060217133 podStartE2EDuration="2.060217133s" podCreationTimestamp="2025-09-29 21:36:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:36:28.05887555 +0000 UTC m=+666.035988241" watchObservedRunningTime="2025-09-29 21:36:28.060217133 +0000 UTC m=+666.037329804" Sep 29 21:36:30 crc kubenswrapper[4911]: I0929 21:36:30.059596 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" event={"ID":"734625b7-c050-470e-92b7-6a4ab5de695e","Type":"ContainerStarted","Data":"c434dd0eef9dcba42143e43b71b14bad08f5def1b6912f40ac371a1034b23486"} Sep 29 21:36:30 crc kubenswrapper[4911]: I0929 21:36:30.060138 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" Sep 29 21:36:30 crc kubenswrapper[4911]: I0929 21:36:30.061497 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-jmsmg" event={"ID":"0ae2d175-d9c4-4a66-9747-fd82fef23890","Type":"ContainerStarted","Data":"087f39a3cd39fc9aaec43284f2e0f5d7f8b50dcb3cfceef2031e30edef7a132a"} Sep 29 21:36:30 crc kubenswrapper[4911]: I0929 21:36:30.063528 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-m57sm" event={"ID":"85f29bf7-4f84-4317-ac7e-b8724401c99f","Type":"ContainerStarted","Data":"7c154f4f3f3e8e32b038d0bbdffc711734b8ab22a70616bbb2f65a3c89fd35e6"} Sep 29 21:36:30 crc kubenswrapper[4911]: I0929 21:36:30.064307 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:30 crc kubenswrapper[4911]: I0929 21:36:30.087610 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" podStartSLOduration=1.744345644 podStartE2EDuration="4.087456235s" podCreationTimestamp="2025-09-29 21:36:26 +0000 UTC" firstStartedPulling="2025-09-29 21:36:26.654272286 +0000 UTC m=+664.631384957" lastFinishedPulling="2025-09-29 21:36:28.997382867 +0000 UTC m=+666.974495548" observedRunningTime="2025-09-29 21:36:30.082469775 +0000 UTC m=+668.059582476" watchObservedRunningTime="2025-09-29 21:36:30.087456235 +0000 UTC m=+668.064568906" Sep 29 21:36:30 crc kubenswrapper[4911]: I0929 21:36:30.100588 4911 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-m57sm" podStartSLOduration=1.544726109 podStartE2EDuration="4.100569484s" podCreationTimestamp="2025-09-29 21:36:26 +0000 UTC" firstStartedPulling="2025-09-29 21:36:26.46215457 +0000 UTC m=+664.439267241" lastFinishedPulling="2025-09-29 21:36:29.017997905 +0000 UTC m=+666.995110616" observedRunningTime="2025-09-29 21:36:30.099554012 +0000 UTC m=+668.076666683" watchObservedRunningTime="2025-09-29 21:36:30.100569484 +0000 UTC m=+668.077682165" Sep 29 21:36:31 crc kubenswrapper[4911]: I0929 21:36:31.072049 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" event={"ID":"603ee12c-cc36-4dd0-af9b-efa00c50712b","Type":"ContainerStarted","Data":"84994c14f31e0e91c002d60dc4b42c43d709b349a8e9ef377ce94eb4b72bf67d"} Sep 29 21:36:31 crc kubenswrapper[4911]: I0929 21:36:31.095363 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-rqglt" podStartSLOduration=2.561818055 podStartE2EDuration="5.095345178s" podCreationTimestamp="2025-09-29 21:36:26 +0000 UTC" firstStartedPulling="2025-09-29 21:36:27.390233993 +0000 UTC m=+665.367346664" lastFinishedPulling="2025-09-29 21:36:29.923761096 +0000 UTC m=+667.900873787" observedRunningTime="2025-09-29 21:36:31.091911429 +0000 UTC m=+669.069024120" watchObservedRunningTime="2025-09-29 21:36:31.095345178 +0000 UTC m=+669.072457839" Sep 29 21:36:32 crc kubenswrapper[4911]: I0929 21:36:32.083038 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-jmsmg" event={"ID":"0ae2d175-d9c4-4a66-9747-fd82fef23890","Type":"ContainerStarted","Data":"13115724d8175041b9929e12c67671b503165bc47635f5384fb188b3c99530d4"} Sep 29 21:36:32 crc kubenswrapper[4911]: I0929 21:36:32.107205 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-jmsmg" podStartSLOduration=1.273791335 podStartE2EDuration="6.107182557s" podCreationTimestamp="2025-09-29 21:36:26 +0000 UTC" firstStartedPulling="2025-09-29 21:36:26.608327719 +0000 UTC m=+664.585440390" lastFinishedPulling="2025-09-29 21:36:31.441718941 +0000 UTC m=+669.418831612" observedRunningTime="2025-09-29 21:36:32.103433457 +0000 UTC m=+670.080546188" watchObservedRunningTime="2025-09-29 21:36:32.107182557 +0000 UTC m=+670.084295278" Sep 29 21:36:36 crc kubenswrapper[4911]: I0929 21:36:36.476197 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-m57sm" Sep 29 21:36:36 crc kubenswrapper[4911]: I0929 21:36:36.742575 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:36 crc kubenswrapper[4911]: I0929 21:36:36.742685 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:36 crc kubenswrapper[4911]: I0929 21:36:36.748755 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:37 crc kubenswrapper[4911]: I0929 21:36:37.129960 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-54d6d488b5-z2mz8" Sep 29 21:36:37 crc kubenswrapper[4911]: I0929 21:36:37.202219 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-console/console-f9d7485db-szrp2"] Sep 29 21:36:46 crc kubenswrapper[4911]: I0929 21:36:46.398209 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qdgf7" Sep 29 21:36:55 crc kubenswrapper[4911]: I0929 21:36:55.211396 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:36:55 crc kubenswrapper[4911]: I0929 21:36:55.213062 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.258301 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-szrp2" podUID="96f91b1a-e276-4bc1-9308-5375745c803c" containerName="console" containerID="cri-o://e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7" gracePeriod=15 Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.705997 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-szrp2_96f91b1a-e276-4bc1-9308-5375745c803c/console/0.log" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.706385 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.808274 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-service-ca\") pod \"96f91b1a-e276-4bc1-9308-5375745c803c\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.808333 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-oauth-serving-cert\") pod \"96f91b1a-e276-4bc1-9308-5375745c803c\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.808354 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-console-config\") pod \"96f91b1a-e276-4bc1-9308-5375745c803c\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.808374 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/96f91b1a-e276-4bc1-9308-5375745c803c-console-oauth-config\") pod \"96f91b1a-e276-4bc1-9308-5375745c803c\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.808417 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/96f91b1a-e276-4bc1-9308-5375745c803c-console-serving-cert\") pod \"96f91b1a-e276-4bc1-9308-5375745c803c\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " 
Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.808454 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-trusted-ca-bundle\") pod \"96f91b1a-e276-4bc1-9308-5375745c803c\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.808473 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n66j4\" (UniqueName: \"kubernetes.io/projected/96f91b1a-e276-4bc1-9308-5375745c803c-kube-api-access-n66j4\") pod \"96f91b1a-e276-4bc1-9308-5375745c803c\" (UID: \"96f91b1a-e276-4bc1-9308-5375745c803c\") " Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.809526 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "96f91b1a-e276-4bc1-9308-5375745c803c" (UID: "96f91b1a-e276-4bc1-9308-5375745c803c"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.809545 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-console-config" (OuterVolumeSpecName: "console-config") pod "96f91b1a-e276-4bc1-9308-5375745c803c" (UID: "96f91b1a-e276-4bc1-9308-5375745c803c"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.809594 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-service-ca" (OuterVolumeSpecName: "service-ca") pod "96f91b1a-e276-4bc1-9308-5375745c803c" (UID: "96f91b1a-e276-4bc1-9308-5375745c803c"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.810187 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "96f91b1a-e276-4bc1-9308-5375745c803c" (UID: "96f91b1a-e276-4bc1-9308-5375745c803c"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.814826 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96f91b1a-e276-4bc1-9308-5375745c803c-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "96f91b1a-e276-4bc1-9308-5375745c803c" (UID: "96f91b1a-e276-4bc1-9308-5375745c803c"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.815277 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96f91b1a-e276-4bc1-9308-5375745c803c-kube-api-access-n66j4" (OuterVolumeSpecName: "kube-api-access-n66j4") pod "96f91b1a-e276-4bc1-9308-5375745c803c" (UID: "96f91b1a-e276-4bc1-9308-5375745c803c"). InnerVolumeSpecName "kube-api-access-n66j4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.815482 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96f91b1a-e276-4bc1-9308-5375745c803c-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "96f91b1a-e276-4bc1-9308-5375745c803c" (UID: "96f91b1a-e276-4bc1-9308-5375745c803c"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.909914 4911 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.909949 4911 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.909988 4911 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.909999 4911 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/96f91b1a-e276-4bc1-9308-5375745c803c-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.910009 4911 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/96f91b1a-e276-4bc1-9308-5375745c803c-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.910018 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/96f91b1a-e276-4bc1-9308-5375745c803c-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:02 crc kubenswrapper[4911]: I0929 21:37:02.910027 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n66j4\" (UniqueName: \"kubernetes.io/projected/96f91b1a-e276-4bc1-9308-5375745c803c-kube-api-access-n66j4\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:03 crc kubenswrapper[4911]: I0929 21:37:03.349674 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-szrp2_96f91b1a-e276-4bc1-9308-5375745c803c/console/0.log" Sep 29 21:37:03 crc kubenswrapper[4911]: I0929 21:37:03.349753 4911 generic.go:334] "Generic (PLEG): container finished" podID="96f91b1a-e276-4bc1-9308-5375745c803c" containerID="e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7" exitCode=2 Sep 29 21:37:03 crc kubenswrapper[4911]: I0929 21:37:03.349827 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-szrp2" event={"ID":"96f91b1a-e276-4bc1-9308-5375745c803c","Type":"ContainerDied","Data":"e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7"} Sep 29 21:37:03 crc kubenswrapper[4911]: I0929 21:37:03.349877 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-szrp2" event={"ID":"96f91b1a-e276-4bc1-9308-5375745c803c","Type":"ContainerDied","Data":"c74ef1122b06529e8b44a70c1679d812075d29bb27f8a7112384540ab2b0ebd4"} Sep 29 21:37:03 crc 
kubenswrapper[4911]: I0929 21:37:03.349894 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-szrp2" Sep 29 21:37:03 crc kubenswrapper[4911]: I0929 21:37:03.349906 4911 scope.go:117] "RemoveContainer" containerID="e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7" Sep 29 21:37:03 crc kubenswrapper[4911]: I0929 21:37:03.385598 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-szrp2"] Sep 29 21:37:03 crc kubenswrapper[4911]: I0929 21:37:03.389705 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-szrp2"] Sep 29 21:37:03 crc kubenswrapper[4911]: I0929 21:37:03.718893 4911 scope.go:117] "RemoveContainer" containerID="e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7" Sep 29 21:37:03 crc kubenswrapper[4911]: E0929 21:37:03.719691 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7\": container with ID starting with e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7 not found: ID does not exist" containerID="e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7" Sep 29 21:37:03 crc kubenswrapper[4911]: I0929 21:37:03.719745 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7"} err="failed to get container status \"e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7\": rpc error: code = NotFound desc = could not find container \"e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7\": container with ID starting with e88a4340a9dfdea81cc1df61fbff85e7a579e63ba69b15b3a4fc8aa4a5e7dad7 not found: ID does not exist" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.259399 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr"] Sep 29 21:37:04 crc kubenswrapper[4911]: E0929 21:37:04.259936 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96f91b1a-e276-4bc1-9308-5375745c803c" containerName="console" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.259960 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="96f91b1a-e276-4bc1-9308-5375745c803c" containerName="console" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.260107 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="96f91b1a-e276-4bc1-9308-5375745c803c" containerName="console" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.261142 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.263506 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.266257 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr"] Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.333526 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdvxq\" (UniqueName: \"kubernetes.io/projected/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-kube-api-access-wdvxq\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr\" (UID: \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.333719 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr\" (UID: \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.333939 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr\" (UID: \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.434905 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr\" (UID: \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.434997 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdvxq\" (UniqueName: \"kubernetes.io/projected/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-kube-api-access-wdvxq\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr\" (UID: \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.435046 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr\" (UID: \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.435556 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr\" (UID: \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.435583 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr\" (UID: \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.457075 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdvxq\" (UniqueName: \"kubernetes.io/projected/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-kube-api-access-wdvxq\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr\" (UID: \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.574654 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:04 crc kubenswrapper[4911]: I0929 21:37:04.717683 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96f91b1a-e276-4bc1-9308-5375745c803c" path="/var/lib/kubelet/pods/96f91b1a-e276-4bc1-9308-5375745c803c/volumes" Sep 29 21:37:05 crc kubenswrapper[4911]: I0929 21:37:05.055734 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr"] Sep 29 21:37:05 crc kubenswrapper[4911]: I0929 21:37:05.367062 4911 generic.go:334] "Generic (PLEG): container finished" podID="d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" containerID="2ef760dd81d3a3119f82294aa2150ed22095e2974954c4348c31b7d923975c28" exitCode=0 Sep 29 21:37:05 crc kubenswrapper[4911]: I0929 21:37:05.367155 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" event={"ID":"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7","Type":"ContainerDied","Data":"2ef760dd81d3a3119f82294aa2150ed22095e2974954c4348c31b7d923975c28"} Sep 29 21:37:05 crc kubenswrapper[4911]: I0929 21:37:05.367229 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" event={"ID":"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7","Type":"ContainerStarted","Data":"75450a442fd1da5c66c1999f9bda988347fb6823f521260028d9f035bb0dbf5c"} Sep 29 21:37:07 crc kubenswrapper[4911]: I0929 21:37:07.385596 4911 generic.go:334] "Generic (PLEG): container finished" podID="d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" containerID="a31f3445532f284ed9deb5e59f1d39169eb3cb0e4fcaee090bdbed2331e87f0f" exitCode=0 Sep 29 21:37:07 crc kubenswrapper[4911]: I0929 21:37:07.385758 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" event={"ID":"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7","Type":"ContainerDied","Data":"a31f3445532f284ed9deb5e59f1d39169eb3cb0e4fcaee090bdbed2331e87f0f"} Sep 29 21:37:08 crc kubenswrapper[4911]: I0929 
21:37:08.393645 4911 generic.go:334] "Generic (PLEG): container finished" podID="d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" containerID="96871a4f3c9b8cdc08b8120a18c1d8fdcb6e8159856b0c449d92d5d335bd3544" exitCode=0 Sep 29 21:37:08 crc kubenswrapper[4911]: I0929 21:37:08.393695 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" event={"ID":"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7","Type":"ContainerDied","Data":"96871a4f3c9b8cdc08b8120a18c1d8fdcb6e8159856b0c449d92d5d335bd3544"} Sep 29 21:37:09 crc kubenswrapper[4911]: I0929 21:37:09.673716 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:09 crc kubenswrapper[4911]: I0929 21:37:09.706885 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-bundle\") pod \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\" (UID: \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\") " Sep 29 21:37:09 crc kubenswrapper[4911]: I0929 21:37:09.706997 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdvxq\" (UniqueName: \"kubernetes.io/projected/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-kube-api-access-wdvxq\") pod \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\" (UID: \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\") " Sep 29 21:37:09 crc kubenswrapper[4911]: I0929 21:37:09.707102 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-util\") pod \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\" (UID: \"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7\") " Sep 29 21:37:09 crc kubenswrapper[4911]: I0929 21:37:09.710307 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-bundle" (OuterVolumeSpecName: "bundle") pod "d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" (UID: "d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:37:09 crc kubenswrapper[4911]: I0929 21:37:09.721980 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-kube-api-access-wdvxq" (OuterVolumeSpecName: "kube-api-access-wdvxq") pod "d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" (UID: "d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7"). InnerVolumeSpecName "kube-api-access-wdvxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:37:09 crc kubenswrapper[4911]: I0929 21:37:09.738261 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-util" (OuterVolumeSpecName: "util") pod "d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" (UID: "d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:37:09 crc kubenswrapper[4911]: I0929 21:37:09.809208 4911 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:09 crc kubenswrapper[4911]: I0929 21:37:09.809261 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdvxq\" (UniqueName: \"kubernetes.io/projected/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-kube-api-access-wdvxq\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:09 crc kubenswrapper[4911]: I0929 21:37:09.809280 4911 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7-util\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:10 crc kubenswrapper[4911]: I0929 21:37:10.411940 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" event={"ID":"d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7","Type":"ContainerDied","Data":"75450a442fd1da5c66c1999f9bda988347fb6823f521260028d9f035bb0dbf5c"} Sep 29 21:37:10 crc kubenswrapper[4911]: I0929 21:37:10.412035 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75450a442fd1da5c66c1999f9bda988347fb6823f521260028d9f035bb0dbf5c" Sep 29 21:37:10 crc kubenswrapper[4911]: I0929 21:37:10.412064 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.583915 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz"] Sep 29 21:37:19 crc kubenswrapper[4911]: E0929 21:37:19.584649 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" containerName="util" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.584663 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" containerName="util" Sep 29 21:37:19 crc kubenswrapper[4911]: E0929 21:37:19.584673 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" containerName="extract" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.584678 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" containerName="extract" Sep 29 21:37:19 crc kubenswrapper[4911]: E0929 21:37:19.584692 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" containerName="pull" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.584698 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" containerName="pull" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.584844 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7" containerName="extract" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.585253 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.588028 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.588069 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.588687 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-zpzrl" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.588723 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.591098 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.601080 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz"] Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.651986 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjfb4\" (UniqueName: \"kubernetes.io/projected/305ebfd8-5281-4a0f-9f5d-57db4028fa54-kube-api-access-rjfb4\") pod \"metallb-operator-controller-manager-5bf9d4f487-rzrcz\" (UID: \"305ebfd8-5281-4a0f-9f5d-57db4028fa54\") " pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.652042 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/305ebfd8-5281-4a0f-9f5d-57db4028fa54-apiservice-cert\") pod \"metallb-operator-controller-manager-5bf9d4f487-rzrcz\" (UID: \"305ebfd8-5281-4a0f-9f5d-57db4028fa54\") " pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.652067 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/305ebfd8-5281-4a0f-9f5d-57db4028fa54-webhook-cert\") pod \"metallb-operator-controller-manager-5bf9d4f487-rzrcz\" (UID: \"305ebfd8-5281-4a0f-9f5d-57db4028fa54\") " pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.753146 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/305ebfd8-5281-4a0f-9f5d-57db4028fa54-webhook-cert\") pod \"metallb-operator-controller-manager-5bf9d4f487-rzrcz\" (UID: \"305ebfd8-5281-4a0f-9f5d-57db4028fa54\") " pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.753275 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjfb4\" (UniqueName: \"kubernetes.io/projected/305ebfd8-5281-4a0f-9f5d-57db4028fa54-kube-api-access-rjfb4\") pod \"metallb-operator-controller-manager-5bf9d4f487-rzrcz\" (UID: \"305ebfd8-5281-4a0f-9f5d-57db4028fa54\") " pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.753316 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/305ebfd8-5281-4a0f-9f5d-57db4028fa54-apiservice-cert\") pod \"metallb-operator-controller-manager-5bf9d4f487-rzrcz\" (UID: \"305ebfd8-5281-4a0f-9f5d-57db4028fa54\") " pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.758855 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/305ebfd8-5281-4a0f-9f5d-57db4028fa54-apiservice-cert\") pod \"metallb-operator-controller-manager-5bf9d4f487-rzrcz\" (UID: \"305ebfd8-5281-4a0f-9f5d-57db4028fa54\") " pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.761321 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/305ebfd8-5281-4a0f-9f5d-57db4028fa54-webhook-cert\") pod \"metallb-operator-controller-manager-5bf9d4f487-rzrcz\" (UID: \"305ebfd8-5281-4a0f-9f5d-57db4028fa54\") " pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.769303 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjfb4\" (UniqueName: \"kubernetes.io/projected/305ebfd8-5281-4a0f-9f5d-57db4028fa54-kube-api-access-rjfb4\") pod \"metallb-operator-controller-manager-5bf9d4f487-rzrcz\" (UID: \"305ebfd8-5281-4a0f-9f5d-57db4028fa54\") " pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.839762 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk"] Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.840446 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.847001 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.847622 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-znhgb" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.847655 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.855771 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/72686e5d-383c-4860-bc1a-fb31a11f900d-webhook-cert\") pod \"metallb-operator-webhook-server-6654f57795-4tpgk\" (UID: \"72686e5d-383c-4860-bc1a-fb31a11f900d\") " pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.855902 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/72686e5d-383c-4860-bc1a-fb31a11f900d-apiservice-cert\") pod \"metallb-operator-webhook-server-6654f57795-4tpgk\" (UID: \"72686e5d-383c-4860-bc1a-fb31a11f900d\") " pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.855983 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzc4h\" (UniqueName: \"kubernetes.io/projected/72686e5d-383c-4860-bc1a-fb31a11f900d-kube-api-access-vzc4h\") pod \"metallb-operator-webhook-server-6654f57795-4tpgk\" (UID: \"72686e5d-383c-4860-bc1a-fb31a11f900d\") " pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.886034 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk"] Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.902808 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.956974 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/72686e5d-383c-4860-bc1a-fb31a11f900d-apiservice-cert\") pod \"metallb-operator-webhook-server-6654f57795-4tpgk\" (UID: \"72686e5d-383c-4860-bc1a-fb31a11f900d\") " pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.957044 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzc4h\" (UniqueName: \"kubernetes.io/projected/72686e5d-383c-4860-bc1a-fb31a11f900d-kube-api-access-vzc4h\") pod \"metallb-operator-webhook-server-6654f57795-4tpgk\" (UID: \"72686e5d-383c-4860-bc1a-fb31a11f900d\") " pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.957087 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/72686e5d-383c-4860-bc1a-fb31a11f900d-webhook-cert\") pod \"metallb-operator-webhook-server-6654f57795-4tpgk\" (UID: \"72686e5d-383c-4860-bc1a-fb31a11f900d\") " pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.968977 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/72686e5d-383c-4860-bc1a-fb31a11f900d-apiservice-cert\") pod \"metallb-operator-webhook-server-6654f57795-4tpgk\" (UID: \"72686e5d-383c-4860-bc1a-fb31a11f900d\") " pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.977346 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/72686e5d-383c-4860-bc1a-fb31a11f900d-webhook-cert\") pod \"metallb-operator-webhook-server-6654f57795-4tpgk\" (UID: \"72686e5d-383c-4860-bc1a-fb31a11f900d\") " pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:19 crc kubenswrapper[4911]: I0929 21:37:19.990471 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzc4h\" (UniqueName: \"kubernetes.io/projected/72686e5d-383c-4860-bc1a-fb31a11f900d-kube-api-access-vzc4h\") pod \"metallb-operator-webhook-server-6654f57795-4tpgk\" (UID: \"72686e5d-383c-4860-bc1a-fb31a11f900d\") " pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:20 crc kubenswrapper[4911]: I0929 21:37:20.162953 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:20 crc kubenswrapper[4911]: I0929 21:37:20.368093 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk"] Sep 29 21:37:20 crc kubenswrapper[4911]: W0929 21:37:20.373427 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72686e5d_383c_4860_bc1a_fb31a11f900d.slice/crio-06a6a599c256de594ba1ea5a3321c676150d2ff4997d879bd000e8996656e554 WatchSource:0}: Error finding container 06a6a599c256de594ba1ea5a3321c676150d2ff4997d879bd000e8996656e554: Status 404 returned error can't find the container with id 06a6a599c256de594ba1ea5a3321c676150d2ff4997d879bd000e8996656e554 Sep 29 21:37:20 crc kubenswrapper[4911]: I0929 21:37:20.471505 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" event={"ID":"72686e5d-383c-4860-bc1a-fb31a11f900d","Type":"ContainerStarted","Data":"06a6a599c256de594ba1ea5a3321c676150d2ff4997d879bd000e8996656e554"} Sep 29 21:37:20 crc kubenswrapper[4911]: I0929 21:37:20.476154 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz"] Sep 29 21:37:20 crc kubenswrapper[4911]: W0929 21:37:20.484034 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod305ebfd8_5281_4a0f_9f5d_57db4028fa54.slice/crio-602d30796a7314a67d624211d67567807398e72aad1a5e77bac2b6a74cb14179 WatchSource:0}: Error finding container 602d30796a7314a67d624211d67567807398e72aad1a5e77bac2b6a74cb14179: Status 404 returned error can't find the container with id 602d30796a7314a67d624211d67567807398e72aad1a5e77bac2b6a74cb14179 Sep 29 21:37:21 crc kubenswrapper[4911]: I0929 21:37:21.479519 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" event={"ID":"305ebfd8-5281-4a0f-9f5d-57db4028fa54","Type":"ContainerStarted","Data":"602d30796a7314a67d624211d67567807398e72aad1a5e77bac2b6a74cb14179"} Sep 29 21:37:25 crc kubenswrapper[4911]: I0929 21:37:25.211401 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:37:25 crc kubenswrapper[4911]: I0929 21:37:25.211872 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:37:26 crc kubenswrapper[4911]: I0929 21:37:26.539891 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" event={"ID":"305ebfd8-5281-4a0f-9f5d-57db4028fa54","Type":"ContainerStarted","Data":"1f074827ec5a4b218b4aa1dfc9e984adf3d8a06133f779e1db703cedfa9ec2e3"} Sep 29 21:37:26 crc kubenswrapper[4911]: I0929 21:37:26.540190 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:37:26 
crc kubenswrapper[4911]: I0929 21:37:26.542454 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" event={"ID":"72686e5d-383c-4860-bc1a-fb31a11f900d","Type":"ContainerStarted","Data":"9f5e48604c779889497094c45bea0cda9e95949c73242f360f06142d1e7a8cf0"} Sep 29 21:37:26 crc kubenswrapper[4911]: I0929 21:37:26.542634 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:26 crc kubenswrapper[4911]: I0929 21:37:26.567059 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" podStartSLOduration=2.623401903 podStartE2EDuration="7.567037632s" podCreationTimestamp="2025-09-29 21:37:19 +0000 UTC" firstStartedPulling="2025-09-29 21:37:20.48838967 +0000 UTC m=+718.465502331" lastFinishedPulling="2025-09-29 21:37:25.432025389 +0000 UTC m=+723.409138060" observedRunningTime="2025-09-29 21:37:26.560507565 +0000 UTC m=+724.537620266" watchObservedRunningTime="2025-09-29 21:37:26.567037632 +0000 UTC m=+724.544150313" Sep 29 21:37:26 crc kubenswrapper[4911]: I0929 21:37:26.590744 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" podStartSLOduration=2.530962199 podStartE2EDuration="7.590727806s" podCreationTimestamp="2025-09-29 21:37:19 +0000 UTC" firstStartedPulling="2025-09-29 21:37:20.376087053 +0000 UTC m=+718.353199724" lastFinishedPulling="2025-09-29 21:37:25.43585265 +0000 UTC m=+723.412965331" observedRunningTime="2025-09-29 21:37:26.588628899 +0000 UTC m=+724.565741580" watchObservedRunningTime="2025-09-29 21:37:26.590727806 +0000 UTC m=+724.567840487" Sep 29 21:37:40 crc kubenswrapper[4911]: I0929 21:37:40.170045 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6654f57795-4tpgk" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.336485 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wb4m9"] Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.337391 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" podUID="affba9da-62d0-47e6-b833-8b6c0e774fde" containerName="controller-manager" containerID="cri-o://9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243" gracePeriod=30 Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.422628 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j"] Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.422837 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" podUID="cd47cdce-41f4-416c-9436-4c386c50eb9e" containerName="route-controller-manager" containerID="cri-o://bf6eb2fd3b582c5f217d8baac15ba10340748781ed425b7bc2babe5caded140f" gracePeriod=30 Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.696508 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.700063 4911 generic.go:334] "Generic (PLEG): container finished" podID="cd47cdce-41f4-416c-9436-4c386c50eb9e" containerID="bf6eb2fd3b582c5f217d8baac15ba10340748781ed425b7bc2babe5caded140f" exitCode=0 Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.702197 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-config\") pod \"affba9da-62d0-47e6-b833-8b6c0e774fde\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.702236 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-proxy-ca-bundles\") pod \"affba9da-62d0-47e6-b833-8b6c0e774fde\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.702291 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djxk9\" (UniqueName: \"kubernetes.io/projected/affba9da-62d0-47e6-b833-8b6c0e774fde-kube-api-access-djxk9\") pod \"affba9da-62d0-47e6-b833-8b6c0e774fde\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.702357 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-client-ca\") pod \"affba9da-62d0-47e6-b833-8b6c0e774fde\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.702387 4911 generic.go:334] "Generic (PLEG): container finished" podID="affba9da-62d0-47e6-b833-8b6c0e774fde" containerID="9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243" exitCode=0 Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.702410 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/affba9da-62d0-47e6-b833-8b6c0e774fde-serving-cert\") pod \"affba9da-62d0-47e6-b833-8b6c0e774fde\" (UID: \"affba9da-62d0-47e6-b833-8b6c0e774fde\") " Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.702430 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.703479 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "affba9da-62d0-47e6-b833-8b6c0e774fde" (UID: "affba9da-62d0-47e6-b833-8b6c0e774fde"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.703631 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-config" (OuterVolumeSpecName: "config") pod "affba9da-62d0-47e6-b833-8b6c0e774fde" (UID: "affba9da-62d0-47e6-b833-8b6c0e774fde"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.703644 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-client-ca" (OuterVolumeSpecName: "client-ca") pod "affba9da-62d0-47e6-b833-8b6c0e774fde" (UID: "affba9da-62d0-47e6-b833-8b6c0e774fde"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.710876 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/affba9da-62d0-47e6-b833-8b6c0e774fde-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "affba9da-62d0-47e6-b833-8b6c0e774fde" (UID: "affba9da-62d0-47e6-b833-8b6c0e774fde"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.714100 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/affba9da-62d0-47e6-b833-8b6c0e774fde-kube-api-access-djxk9" (OuterVolumeSpecName: "kube-api-access-djxk9") pod "affba9da-62d0-47e6-b833-8b6c0e774fde" (UID: "affba9da-62d0-47e6-b833-8b6c0e774fde"). InnerVolumeSpecName "kube-api-access-djxk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.719811 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" event={"ID":"cd47cdce-41f4-416c-9436-4c386c50eb9e","Type":"ContainerDied","Data":"bf6eb2fd3b582c5f217d8baac15ba10340748781ed425b7bc2babe5caded140f"} Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.720090 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" event={"ID":"affba9da-62d0-47e6-b833-8b6c0e774fde","Type":"ContainerDied","Data":"9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243"} Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.720116 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wb4m9" event={"ID":"affba9da-62d0-47e6-b833-8b6c0e774fde","Type":"ContainerDied","Data":"8a3ec9a3bbea7a01ea52942466afb3a981c6471aec455a56e4bedd91f3378007"} Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.720137 4911 scope.go:117] "RemoveContainer" containerID="9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.745372 4911 scope.go:117] "RemoveContainer" containerID="9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243" Sep 29 21:37:50 crc kubenswrapper[4911]: E0929 21:37:50.745971 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243\": container with ID starting with 9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243 not found: ID does not exist" containerID="9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.746022 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243"} err="failed to get container status 
\"9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243\": rpc error: code = NotFound desc = could not find container \"9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243\": container with ID starting with 9f99c7076904ef9c169fbcb7a0cbf852cbb6da1bb5edd06eeb404691be774243 not found: ID does not exist" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.771622 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.803530 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd47cdce-41f4-416c-9436-4c386c50eb9e-config\") pod \"cd47cdce-41f4-416c-9436-4c386c50eb9e\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.803600 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vr22l\" (UniqueName: \"kubernetes.io/projected/cd47cdce-41f4-416c-9436-4c386c50eb9e-kube-api-access-vr22l\") pod \"cd47cdce-41f4-416c-9436-4c386c50eb9e\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.803634 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd47cdce-41f4-416c-9436-4c386c50eb9e-client-ca\") pod \"cd47cdce-41f4-416c-9436-4c386c50eb9e\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.803670 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd47cdce-41f4-416c-9436-4c386c50eb9e-serving-cert\") pod \"cd47cdce-41f4-416c-9436-4c386c50eb9e\" (UID: \"cd47cdce-41f4-416c-9436-4c386c50eb9e\") " Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.803978 4911 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.803993 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/affba9da-62d0-47e6-b833-8b6c0e774fde-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.804001 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.804010 4911 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/affba9da-62d0-47e6-b833-8b6c0e774fde-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.804019 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djxk9\" (UniqueName: \"kubernetes.io/projected/affba9da-62d0-47e6-b833-8b6c0e774fde-kube-api-access-djxk9\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.805145 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd47cdce-41f4-416c-9436-4c386c50eb9e-client-ca" (OuterVolumeSpecName: "client-ca") pod 
"cd47cdce-41f4-416c-9436-4c386c50eb9e" (UID: "cd47cdce-41f4-416c-9436-4c386c50eb9e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.805168 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd47cdce-41f4-416c-9436-4c386c50eb9e-config" (OuterVolumeSpecName: "config") pod "cd47cdce-41f4-416c-9436-4c386c50eb9e" (UID: "cd47cdce-41f4-416c-9436-4c386c50eb9e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.807930 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd47cdce-41f4-416c-9436-4c386c50eb9e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "cd47cdce-41f4-416c-9436-4c386c50eb9e" (UID: "cd47cdce-41f4-416c-9436-4c386c50eb9e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.808344 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd47cdce-41f4-416c-9436-4c386c50eb9e-kube-api-access-vr22l" (OuterVolumeSpecName: "kube-api-access-vr22l") pod "cd47cdce-41f4-416c-9436-4c386c50eb9e" (UID: "cd47cdce-41f4-416c-9436-4c386c50eb9e"). InnerVolumeSpecName "kube-api-access-vr22l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.904647 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd47cdce-41f4-416c-9436-4c386c50eb9e-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.904678 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd47cdce-41f4-416c-9436-4c386c50eb9e-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.904688 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vr22l\" (UniqueName: \"kubernetes.io/projected/cd47cdce-41f4-416c-9436-4c386c50eb9e-kube-api-access-vr22l\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:50 crc kubenswrapper[4911]: I0929 21:37:50.904697 4911 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd47cdce-41f4-416c-9436-4c386c50eb9e-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.017403 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wb4m9"] Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.020924 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wb4m9"] Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.587384 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-c7767548d-l97j5"] Sep 29 21:37:51 crc kubenswrapper[4911]: E0929 21:37:51.587606 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="affba9da-62d0-47e6-b833-8b6c0e774fde" containerName="controller-manager" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.587618 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="affba9da-62d0-47e6-b833-8b6c0e774fde" containerName="controller-manager" Sep 29 21:37:51 crc kubenswrapper[4911]: E0929 
21:37:51.587638 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd47cdce-41f4-416c-9436-4c386c50eb9e" containerName="route-controller-manager" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.587644 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd47cdce-41f4-416c-9436-4c386c50eb9e" containerName="route-controller-manager" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.587737 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd47cdce-41f4-416c-9436-4c386c50eb9e" containerName="route-controller-manager" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.587748 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="affba9da-62d0-47e6-b833-8b6c0e774fde" containerName="controller-manager" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.592072 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd"] Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.592574 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.593520 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.602081 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c7767548d-l97j5"] Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.603421 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.603670 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.603802 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.603893 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.603967 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.604365 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.614285 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35578aad-bd9c-4256-8ec3-9ded20bc994b-client-ca\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.614334 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35578aad-bd9c-4256-8ec3-9ded20bc994b-serving-cert\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " 
pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.614373 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvnhh\" (UniqueName: \"kubernetes.io/projected/35578aad-bd9c-4256-8ec3-9ded20bc994b-kube-api-access-zvnhh\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.614403 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/35578aad-bd9c-4256-8ec3-9ded20bc994b-proxy-ca-bundles\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.614437 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bx86b\" (UniqueName: \"kubernetes.io/projected/379ad7b0-92de-419d-9fcf-4953cd717a89-kube-api-access-bx86b\") pod \"route-controller-manager-867b6955c9-h54sd\" (UID: \"379ad7b0-92de-419d-9fcf-4953cd717a89\") " pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.614473 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35578aad-bd9c-4256-8ec3-9ded20bc994b-config\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.614506 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/379ad7b0-92de-419d-9fcf-4953cd717a89-client-ca\") pod \"route-controller-manager-867b6955c9-h54sd\" (UID: \"379ad7b0-92de-419d-9fcf-4953cd717a89\") " pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.614535 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/379ad7b0-92de-419d-9fcf-4953cd717a89-config\") pod \"route-controller-manager-867b6955c9-h54sd\" (UID: \"379ad7b0-92de-419d-9fcf-4953cd717a89\") " pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.614560 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/379ad7b0-92de-419d-9fcf-4953cd717a89-serving-cert\") pod \"route-controller-manager-867b6955c9-h54sd\" (UID: \"379ad7b0-92de-419d-9fcf-4953cd717a89\") " pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.632071 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.637898 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd"] Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.708919 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" event={"ID":"cd47cdce-41f4-416c-9436-4c386c50eb9e","Type":"ContainerDied","Data":"8611bf8580ace2af6afbb6c98553df67772f72b53097de3d1642fadd45422dc8"} Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.709005 4911 scope.go:117] "RemoveContainer" containerID="bf6eb2fd3b582c5f217d8baac15ba10340748781ed425b7bc2babe5caded140f" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.709179 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.716172 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bx86b\" (UniqueName: \"kubernetes.io/projected/379ad7b0-92de-419d-9fcf-4953cd717a89-kube-api-access-bx86b\") pod \"route-controller-manager-867b6955c9-h54sd\" (UID: \"379ad7b0-92de-419d-9fcf-4953cd717a89\") " pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.716286 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35578aad-bd9c-4256-8ec3-9ded20bc994b-config\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.716360 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/379ad7b0-92de-419d-9fcf-4953cd717a89-client-ca\") pod \"route-controller-manager-867b6955c9-h54sd\" (UID: \"379ad7b0-92de-419d-9fcf-4953cd717a89\") " pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.716412 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/379ad7b0-92de-419d-9fcf-4953cd717a89-config\") pod \"route-controller-manager-867b6955c9-h54sd\" (UID: \"379ad7b0-92de-419d-9fcf-4953cd717a89\") " pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.716510 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/379ad7b0-92de-419d-9fcf-4953cd717a89-serving-cert\") pod \"route-controller-manager-867b6955c9-h54sd\" (UID: \"379ad7b0-92de-419d-9fcf-4953cd717a89\") " pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.716678 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35578aad-bd9c-4256-8ec3-9ded20bc994b-client-ca\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.716710 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/35578aad-bd9c-4256-8ec3-9ded20bc994b-serving-cert\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.716757 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvnhh\" (UniqueName: \"kubernetes.io/projected/35578aad-bd9c-4256-8ec3-9ded20bc994b-kube-api-access-zvnhh\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.716836 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/35578aad-bd9c-4256-8ec3-9ded20bc994b-proxy-ca-bundles\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.718011 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/35578aad-bd9c-4256-8ec3-9ded20bc994b-proxy-ca-bundles\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.718592 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/379ad7b0-92de-419d-9fcf-4953cd717a89-client-ca\") pod \"route-controller-manager-867b6955c9-h54sd\" (UID: \"379ad7b0-92de-419d-9fcf-4953cd717a89\") " pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.718815 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35578aad-bd9c-4256-8ec3-9ded20bc994b-client-ca\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.718956 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35578aad-bd9c-4256-8ec3-9ded20bc994b-config\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.719891 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/379ad7b0-92de-419d-9fcf-4953cd717a89-config\") pod \"route-controller-manager-867b6955c9-h54sd\" (UID: \"379ad7b0-92de-419d-9fcf-4953cd717a89\") " pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.725116 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/379ad7b0-92de-419d-9fcf-4953cd717a89-serving-cert\") pod \"route-controller-manager-867b6955c9-h54sd\" (UID: \"379ad7b0-92de-419d-9fcf-4953cd717a89\") " 
pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.737127 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35578aad-bd9c-4256-8ec3-9ded20bc994b-serving-cert\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.744442 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bx86b\" (UniqueName: \"kubernetes.io/projected/379ad7b0-92de-419d-9fcf-4953cd717a89-kube-api-access-bx86b\") pod \"route-controller-manager-867b6955c9-h54sd\" (UID: \"379ad7b0-92de-419d-9fcf-4953cd717a89\") " pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.749022 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvnhh\" (UniqueName: \"kubernetes.io/projected/35578aad-bd9c-4256-8ec3-9ded20bc994b-kube-api-access-zvnhh\") pod \"controller-manager-c7767548d-l97j5\" (UID: \"35578aad-bd9c-4256-8ec3-9ded20bc994b\") " pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.751905 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j"] Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.753073 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2mk6j"] Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.915510 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:51 crc kubenswrapper[4911]: I0929 21:37:51.939114 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.214174 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c7767548d-l97j5"] Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.254647 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd"] Sep 29 21:37:52 crc kubenswrapper[4911]: W0929 21:37:52.269009 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod379ad7b0_92de_419d_9fcf_4953cd717a89.slice/crio-1a8a514d75252fffa838043d13628690005d4abeadea8a4767c1a4da2a7083df WatchSource:0}: Error finding container 1a8a514d75252fffa838043d13628690005d4abeadea8a4767c1a4da2a7083df: Status 404 returned error can't find the container with id 1a8a514d75252fffa838043d13628690005d4abeadea8a4767c1a4da2a7083df Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.712708 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="affba9da-62d0-47e6-b833-8b6c0e774fde" path="/var/lib/kubelet/pods/affba9da-62d0-47e6-b833-8b6c0e774fde/volumes" Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.714767 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd47cdce-41f4-416c-9436-4c386c50eb9e" path="/var/lib/kubelet/pods/cd47cdce-41f4-416c-9436-4c386c50eb9e/volumes" Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.726401 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" event={"ID":"35578aad-bd9c-4256-8ec3-9ded20bc994b","Type":"ContainerStarted","Data":"50d9bfcf1e340a983b70ec9f7fc70891526f9c5fe160ee6f23c315e3934c14fb"} Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.726450 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" event={"ID":"35578aad-bd9c-4256-8ec3-9ded20bc994b","Type":"ContainerStarted","Data":"870b9e63875652cce842e435ed69be6efc3ff070da8afe725f08cd9d402d7905"} Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.727294 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.732103 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.736097 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" event={"ID":"379ad7b0-92de-419d-9fcf-4953cd717a89","Type":"ContainerStarted","Data":"166cca93c49038f7ad68f3ba2e9ac63ffc85f024fac067e18dceb0135212ca26"} Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.736130 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" event={"ID":"379ad7b0-92de-419d-9fcf-4953cd717a89","Type":"ContainerStarted","Data":"1a8a514d75252fffa838043d13628690005d4abeadea8a4767c1a4da2a7083df"} Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.736786 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 
21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.774232 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-c7767548d-l97j5" podStartSLOduration=2.774218232 podStartE2EDuration="2.774218232s" podCreationTimestamp="2025-09-29 21:37:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:37:52.771892548 +0000 UTC m=+750.749005219" watchObservedRunningTime="2025-09-29 21:37:52.774218232 +0000 UTC m=+750.751330903" Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.804319 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" podStartSLOduration=2.804304508 podStartE2EDuration="2.804304508s" podCreationTimestamp="2025-09-29 21:37:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:37:52.801370715 +0000 UTC m=+750.778483386" watchObservedRunningTime="2025-09-29 21:37:52.804304508 +0000 UTC m=+750.781417179" Sep 29 21:37:52 crc kubenswrapper[4911]: I0929 21:37:52.846765 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-867b6955c9-h54sd" Sep 29 21:37:55 crc kubenswrapper[4911]: I0929 21:37:55.211136 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:37:55 crc kubenswrapper[4911]: I0929 21:37:55.211245 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:37:55 crc kubenswrapper[4911]: I0929 21:37:55.211314 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:37:55 crc kubenswrapper[4911]: I0929 21:37:55.212231 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3f4ab040675bb23b2a12316fff86293a5b72278bad6949dfbe357c01f7df89f3"} pod="openshift-machine-config-operator/machine-config-daemon-w647f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 21:37:55 crc kubenswrapper[4911]: I0929 21:37:55.212332 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" containerID="cri-o://3f4ab040675bb23b2a12316fff86293a5b72278bad6949dfbe357c01f7df89f3" gracePeriod=600 Sep 29 21:37:55 crc kubenswrapper[4911]: I0929 21:37:55.761271 4911 generic.go:334] "Generic (PLEG): container finished" podID="50640abc-40db-4390-82d1-f3cfc76da71c" containerID="3f4ab040675bb23b2a12316fff86293a5b72278bad6949dfbe357c01f7df89f3" exitCode=0 Sep 29 21:37:55 crc kubenswrapper[4911]: I0929 21:37:55.761310 4911 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerDied","Data":"3f4ab040675bb23b2a12316fff86293a5b72278bad6949dfbe357c01f7df89f3"} Sep 29 21:37:55 crc kubenswrapper[4911]: I0929 21:37:55.761784 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"5dec289dec4d2c01af51bffa09906f4044b26096f9186eb8b0d1b24f0055ec27"} Sep 29 21:37:55 crc kubenswrapper[4911]: I0929 21:37:55.761834 4911 scope.go:117] "RemoveContainer" containerID="169a8ea1d84bd44d55b71bca47978a16d348f46e726c92490e98ec486b65a803" Sep 29 21:37:58 crc kubenswrapper[4911]: I0929 21:37:58.145469 4911 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 29 21:37:59 crc kubenswrapper[4911]: I0929 21:37:59.906649 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5bf9d4f487-rzrcz" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.737698 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-lj8ph"] Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.741124 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.743497 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.743633 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-8qrr6" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.743877 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.765691 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx"] Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.768658 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.770425 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.782143 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx"] Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.851162 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cb4339ee-098d-428e-89a8-d57aec12356c-frr-startup\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.851267 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cb4339ee-098d-428e-89a8-d57aec12356c-reloader\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.851350 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtf8l\" (UniqueName: \"kubernetes.io/projected/cb4339ee-098d-428e-89a8-d57aec12356c-kube-api-access-mtf8l\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.851378 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cb4339ee-098d-428e-89a8-d57aec12356c-frr-conf\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.851429 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cb4339ee-098d-428e-89a8-d57aec12356c-frr-sockets\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.851458 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cb4339ee-098d-428e-89a8-d57aec12356c-metrics\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.851476 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cb4339ee-098d-428e-89a8-d57aec12356c-metrics-certs\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.851495 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/000d716a-9bed-4422-8a16-8598ff854239-cert\") pod \"frr-k8s-webhook-server-5478bdb765-5ztnx\" (UID: \"000d716a-9bed-4422-8a16-8598ff854239\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 
21:38:00.851513 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv5ht\" (UniqueName: \"kubernetes.io/projected/000d716a-9bed-4422-8a16-8598ff854239-kube-api-access-vv5ht\") pod \"frr-k8s-webhook-server-5478bdb765-5ztnx\" (UID: \"000d716a-9bed-4422-8a16-8598ff854239\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.880106 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-jxhnw"] Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.881747 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-jxhnw" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.893561 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.893685 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.893989 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.894264 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-nnrx5" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.895326 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-xxmg2"] Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.896497 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.898735 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.904282 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-xxmg2"] Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953407 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fef69310-fa8e-4fa6-b35c-1347023377d8-cert\") pod \"controller-5d688f5ffc-xxmg2\" (UID: \"fef69310-fa8e-4fa6-b35c-1347023377d8\") " pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953468 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a428871b-77b8-46c9-8886-fa3eb5b2e108-metallb-excludel2\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953492 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a428871b-77b8-46c9-8886-fa3eb5b2e108-memberlist\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953522 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtf8l\" (UniqueName: \"kubernetes.io/projected/cb4339ee-098d-428e-89a8-d57aec12356c-kube-api-access-mtf8l\") pod \"frr-k8s-lj8ph\" (UID: 
\"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953540 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a428871b-77b8-46c9-8886-fa3eb5b2e108-metrics-certs\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953561 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cb4339ee-098d-428e-89a8-d57aec12356c-frr-conf\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953612 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cb4339ee-098d-428e-89a8-d57aec12356c-frr-sockets\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953644 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cb4339ee-098d-428e-89a8-d57aec12356c-metrics\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953671 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cb4339ee-098d-428e-89a8-d57aec12356c-metrics-certs\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953692 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmkp6\" (UniqueName: \"kubernetes.io/projected/fef69310-fa8e-4fa6-b35c-1347023377d8-kube-api-access-kmkp6\") pod \"controller-5d688f5ffc-xxmg2\" (UID: \"fef69310-fa8e-4fa6-b35c-1347023377d8\") " pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953718 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/000d716a-9bed-4422-8a16-8598ff854239-cert\") pod \"frr-k8s-webhook-server-5478bdb765-5ztnx\" (UID: \"000d716a-9bed-4422-8a16-8598ff854239\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953739 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv5ht\" (UniqueName: \"kubernetes.io/projected/000d716a-9bed-4422-8a16-8598ff854239-kube-api-access-vv5ht\") pod \"frr-k8s-webhook-server-5478bdb765-5ztnx\" (UID: \"000d716a-9bed-4422-8a16-8598ff854239\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953760 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fef69310-fa8e-4fa6-b35c-1347023377d8-metrics-certs\") pod \"controller-5d688f5ffc-xxmg2\" (UID: \"fef69310-fa8e-4fa6-b35c-1347023377d8\") " pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:00 crc 
kubenswrapper[4911]: I0929 21:38:00.953804 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cb4339ee-098d-428e-89a8-d57aec12356c-frr-startup\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953865 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cb4339ee-098d-428e-89a8-d57aec12356c-reloader\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.953886 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vzp7\" (UniqueName: \"kubernetes.io/projected/a428871b-77b8-46c9-8886-fa3eb5b2e108-kube-api-access-7vzp7\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:00 crc kubenswrapper[4911]: E0929 21:38:00.954011 4911 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Sep 29 21:38:00 crc kubenswrapper[4911]: E0929 21:38:00.954071 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/000d716a-9bed-4422-8a16-8598ff854239-cert podName:000d716a-9bed-4422-8a16-8598ff854239 nodeName:}" failed. No retries permitted until 2025-09-29 21:38:01.45404765 +0000 UTC m=+759.431160321 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/000d716a-9bed-4422-8a16-8598ff854239-cert") pod "frr-k8s-webhook-server-5478bdb765-5ztnx" (UID: "000d716a-9bed-4422-8a16-8598ff854239") : secret "frr-k8s-webhook-server-cert" not found Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.954106 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cb4339ee-098d-428e-89a8-d57aec12356c-frr-conf\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.954311 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cb4339ee-098d-428e-89a8-d57aec12356c-frr-sockets\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.954474 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cb4339ee-098d-428e-89a8-d57aec12356c-metrics\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.955086 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cb4339ee-098d-428e-89a8-d57aec12356c-reloader\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.955661 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cb4339ee-098d-428e-89a8-d57aec12356c-frr-startup\") pod 
\"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.967611 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cb4339ee-098d-428e-89a8-d57aec12356c-metrics-certs\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.971385 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtf8l\" (UniqueName: \"kubernetes.io/projected/cb4339ee-098d-428e-89a8-d57aec12356c-kube-api-access-mtf8l\") pod \"frr-k8s-lj8ph\" (UID: \"cb4339ee-098d-428e-89a8-d57aec12356c\") " pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:00 crc kubenswrapper[4911]: I0929 21:38:00.972392 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv5ht\" (UniqueName: \"kubernetes.io/projected/000d716a-9bed-4422-8a16-8598ff854239-kube-api-access-vv5ht\") pod \"frr-k8s-webhook-server-5478bdb765-5ztnx\" (UID: \"000d716a-9bed-4422-8a16-8598ff854239\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.055069 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a428871b-77b8-46c9-8886-fa3eb5b2e108-metallb-excludel2\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.055535 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a428871b-77b8-46c9-8886-fa3eb5b2e108-memberlist\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.055561 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a428871b-77b8-46c9-8886-fa3eb5b2e108-metrics-certs\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.055645 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmkp6\" (UniqueName: \"kubernetes.io/projected/fef69310-fa8e-4fa6-b35c-1347023377d8-kube-api-access-kmkp6\") pod \"controller-5d688f5ffc-xxmg2\" (UID: \"fef69310-fa8e-4fa6-b35c-1347023377d8\") " pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.055691 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fef69310-fa8e-4fa6-b35c-1347023377d8-metrics-certs\") pod \"controller-5d688f5ffc-xxmg2\" (UID: \"fef69310-fa8e-4fa6-b35c-1347023377d8\") " pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.055738 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vzp7\" (UniqueName: \"kubernetes.io/projected/a428871b-77b8-46c9-8886-fa3eb5b2e108-kube-api-access-7vzp7\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 
21:38:01.055763 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fef69310-fa8e-4fa6-b35c-1347023377d8-cert\") pod \"controller-5d688f5ffc-xxmg2\" (UID: \"fef69310-fa8e-4fa6-b35c-1347023377d8\") " pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:01 crc kubenswrapper[4911]: E0929 21:38:01.055963 4911 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 21:38:01 crc kubenswrapper[4911]: E0929 21:38:01.056020 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a428871b-77b8-46c9-8886-fa3eb5b2e108-memberlist podName:a428871b-77b8-46c9-8886-fa3eb5b2e108 nodeName:}" failed. No retries permitted until 2025-09-29 21:38:01.556003059 +0000 UTC m=+759.533115730 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/a428871b-77b8-46c9-8886-fa3eb5b2e108-memberlist") pod "speaker-jxhnw" (UID: "a428871b-77b8-46c9-8886-fa3eb5b2e108") : secret "metallb-memberlist" not found Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.056647 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a428871b-77b8-46c9-8886-fa3eb5b2e108-metallb-excludel2\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.059688 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.061928 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fef69310-fa8e-4fa6-b35c-1347023377d8-metrics-certs\") pod \"controller-5d688f5ffc-xxmg2\" (UID: \"fef69310-fa8e-4fa6-b35c-1347023377d8\") " pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.061938 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a428871b-77b8-46c9-8886-fa3eb5b2e108-metrics-certs\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.065285 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.070217 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fef69310-fa8e-4fa6-b35c-1347023377d8-cert\") pod \"controller-5d688f5ffc-xxmg2\" (UID: \"fef69310-fa8e-4fa6-b35c-1347023377d8\") " pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.076510 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vzp7\" (UniqueName: \"kubernetes.io/projected/a428871b-77b8-46c9-8886-fa3eb5b2e108-kube-api-access-7vzp7\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.085567 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmkp6\" (UniqueName: \"kubernetes.io/projected/fef69310-fa8e-4fa6-b35c-1347023377d8-kube-api-access-kmkp6\") pod \"controller-5d688f5ffc-xxmg2\" (UID: \"fef69310-fa8e-4fa6-b35c-1347023377d8\") " pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.214189 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.460627 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/000d716a-9bed-4422-8a16-8598ff854239-cert\") pod \"frr-k8s-webhook-server-5478bdb765-5ztnx\" (UID: \"000d716a-9bed-4422-8a16-8598ff854239\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.466214 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/000d716a-9bed-4422-8a16-8598ff854239-cert\") pod \"frr-k8s-webhook-server-5478bdb765-5ztnx\" (UID: \"000d716a-9bed-4422-8a16-8598ff854239\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.562167 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a428871b-77b8-46c9-8886-fa3eb5b2e108-memberlist\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:01 crc kubenswrapper[4911]: E0929 21:38:01.562328 4911 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 21:38:01 crc kubenswrapper[4911]: E0929 21:38:01.562387 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a428871b-77b8-46c9-8886-fa3eb5b2e108-memberlist podName:a428871b-77b8-46c9-8886-fa3eb5b2e108 nodeName:}" failed. No retries permitted until 2025-09-29 21:38:02.562369294 +0000 UTC m=+760.539481965 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/a428871b-77b8-46c9-8886-fa3eb5b2e108-memberlist") pod "speaker-jxhnw" (UID: "a428871b-77b8-46c9-8886-fa3eb5b2e108") : secret "metallb-memberlist" not found Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.680614 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.758362 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-xxmg2"] Sep 29 21:38:01 crc kubenswrapper[4911]: W0929 21:38:01.769071 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfef69310_fa8e_4fa6_b35c_1347023377d8.slice/crio-9a11b30adb28a9065864e830748ce5b32ce39f147e5077fbda746a0fc37fdaaa WatchSource:0}: Error finding container 9a11b30adb28a9065864e830748ce5b32ce39f147e5077fbda746a0fc37fdaaa: Status 404 returned error can't find the container with id 9a11b30adb28a9065864e830748ce5b32ce39f147e5077fbda746a0fc37fdaaa Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.832249 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-lj8ph" event={"ID":"cb4339ee-098d-428e-89a8-d57aec12356c","Type":"ContainerStarted","Data":"e08c6919bfd47efd1cbd7d19713542d9581c5847482f0b425867af5f1d113332"} Sep 29 21:38:01 crc kubenswrapper[4911]: I0929 21:38:01.838057 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-xxmg2" event={"ID":"fef69310-fa8e-4fa6-b35c-1347023377d8","Type":"ContainerStarted","Data":"9a11b30adb28a9065864e830748ce5b32ce39f147e5077fbda746a0fc37fdaaa"} Sep 29 21:38:02 crc kubenswrapper[4911]: I0929 21:38:02.191815 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx"] Sep 29 21:38:02 crc kubenswrapper[4911]: I0929 21:38:02.577257 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a428871b-77b8-46c9-8886-fa3eb5b2e108-memberlist\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:02 crc kubenswrapper[4911]: I0929 21:38:02.587041 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a428871b-77b8-46c9-8886-fa3eb5b2e108-memberlist\") pod \"speaker-jxhnw\" (UID: \"a428871b-77b8-46c9-8886-fa3eb5b2e108\") " pod="metallb-system/speaker-jxhnw" Sep 29 21:38:02 crc kubenswrapper[4911]: I0929 21:38:02.698316 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-jxhnw" Sep 29 21:38:02 crc kubenswrapper[4911]: I0929 21:38:02.879777 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" event={"ID":"000d716a-9bed-4422-8a16-8598ff854239","Type":"ContainerStarted","Data":"05aa8d961e602727754e408260d4172a58359e3efe77d9ac6d5255d7789ebe9c"} Sep 29 21:38:02 crc kubenswrapper[4911]: I0929 21:38:02.883153 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-xxmg2" event={"ID":"fef69310-fa8e-4fa6-b35c-1347023377d8","Type":"ContainerStarted","Data":"281db391aa63b2f10d645ef47ecb3dca308d3a95d4a127809c7903d3f52295cb"} Sep 29 21:38:02 crc kubenswrapper[4911]: I0929 21:38:02.883201 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-xxmg2" event={"ID":"fef69310-fa8e-4fa6-b35c-1347023377d8","Type":"ContainerStarted","Data":"d45bb59ac8c7f56dc2762c71472c4b340a2fb06a8bb93e4bc0ad7a26c17f311c"} Sep 29 21:38:02 crc kubenswrapper[4911]: I0929 21:38:02.884267 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:02 crc kubenswrapper[4911]: I0929 21:38:02.886216 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-jxhnw" event={"ID":"a428871b-77b8-46c9-8886-fa3eb5b2e108","Type":"ContainerStarted","Data":"b9d6b60d3b714247fda234bad2dbf7d2800626e4e3c0a3eb9bcc52969e81f590"} Sep 29 21:38:02 crc kubenswrapper[4911]: I0929 21:38:02.909305 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-xxmg2" podStartSLOduration=2.90928751 podStartE2EDuration="2.90928751s" podCreationTimestamp="2025-09-29 21:38:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:38:02.901499063 +0000 UTC m=+760.878611734" watchObservedRunningTime="2025-09-29 21:38:02.90928751 +0000 UTC m=+760.886400171" Sep 29 21:38:03 crc kubenswrapper[4911]: I0929 21:38:03.899469 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-jxhnw" event={"ID":"a428871b-77b8-46c9-8886-fa3eb5b2e108","Type":"ContainerStarted","Data":"7a79ff95bf0925bd34579f6e8cabd081d6194e164767b565ac51a5b8badce9d2"} Sep 29 21:38:03 crc kubenswrapper[4911]: I0929 21:38:03.899811 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-jxhnw" event={"ID":"a428871b-77b8-46c9-8886-fa3eb5b2e108","Type":"ContainerStarted","Data":"eeedb615618491c49289799e11f1cea86f12f53bd7b4b884c830f23f6fb620d0"} Sep 29 21:38:03 crc kubenswrapper[4911]: I0929 21:38:03.899835 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-jxhnw" Sep 29 21:38:03 crc kubenswrapper[4911]: I0929 21:38:03.919923 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-jxhnw" podStartSLOduration=3.919886242 podStartE2EDuration="3.919886242s" podCreationTimestamp="2025-09-29 21:38:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:38:03.912088195 +0000 UTC m=+761.889200866" watchObservedRunningTime="2025-09-29 21:38:03.919886242 +0000 UTC m=+761.896998913" Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.251812 4911 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-marketplace-bqp4t"] Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.253714 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.268486 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqp4t"] Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.360586 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-utilities\") pod \"redhat-marketplace-bqp4t\" (UID: \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\") " pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.360627 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq9jx\" (UniqueName: \"kubernetes.io/projected/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-kube-api-access-tq9jx\") pod \"redhat-marketplace-bqp4t\" (UID: \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\") " pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.360698 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-catalog-content\") pod \"redhat-marketplace-bqp4t\" (UID: \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\") " pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.462257 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-catalog-content\") pod \"redhat-marketplace-bqp4t\" (UID: \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\") " pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.462316 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-utilities\") pod \"redhat-marketplace-bqp4t\" (UID: \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\") " pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.462336 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq9jx\" (UniqueName: \"kubernetes.io/projected/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-kube-api-access-tq9jx\") pod \"redhat-marketplace-bqp4t\" (UID: \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\") " pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.462844 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-catalog-content\") pod \"redhat-marketplace-bqp4t\" (UID: \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\") " pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.463029 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-utilities\") pod \"redhat-marketplace-bqp4t\" (UID: \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\") 
" pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.483601 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq9jx\" (UniqueName: \"kubernetes.io/projected/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-kube-api-access-tq9jx\") pod \"redhat-marketplace-bqp4t\" (UID: \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\") " pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:06 crc kubenswrapper[4911]: I0929 21:38:06.581568 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:09 crc kubenswrapper[4911]: I0929 21:38:09.695660 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqp4t"] Sep 29 21:38:09 crc kubenswrapper[4911]: W0929 21:38:09.704695 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48cc0bd9_b2a0_4d13_888f_8abf4f244af8.slice/crio-e96181fd88890996b606a5e888a8887653bd44c005d674fd8040507b25c6bcf7 WatchSource:0}: Error finding container e96181fd88890996b606a5e888a8887653bd44c005d674fd8040507b25c6bcf7: Status 404 returned error can't find the container with id e96181fd88890996b606a5e888a8887653bd44c005d674fd8040507b25c6bcf7 Sep 29 21:38:09 crc kubenswrapper[4911]: I0929 21:38:09.939555 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" event={"ID":"000d716a-9bed-4422-8a16-8598ff854239","Type":"ContainerStarted","Data":"204aa0d023649bf09101fbf49658c61fe9e3b3e0b06cbc67bd4acbda5d885068"} Sep 29 21:38:09 crc kubenswrapper[4911]: I0929 21:38:09.939710 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" Sep 29 21:38:09 crc kubenswrapper[4911]: I0929 21:38:09.942439 4911 generic.go:334] "Generic (PLEG): container finished" podID="cb4339ee-098d-428e-89a8-d57aec12356c" containerID="c74ee68feed3b30b7eb1114a325e16dd39e712fe7c18338c6a3820f26e903882" exitCode=0 Sep 29 21:38:09 crc kubenswrapper[4911]: I0929 21:38:09.942560 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-lj8ph" event={"ID":"cb4339ee-098d-428e-89a8-d57aec12356c","Type":"ContainerDied","Data":"c74ee68feed3b30b7eb1114a325e16dd39e712fe7c18338c6a3820f26e903882"} Sep 29 21:38:09 crc kubenswrapper[4911]: I0929 21:38:09.948321 4911 generic.go:334] "Generic (PLEG): container finished" podID="48cc0bd9-b2a0-4d13-888f-8abf4f244af8" containerID="0f4f0460d1bb7d859627a9e628b935d53198ff8831b26a10578c5cb2828f25ed" exitCode=0 Sep 29 21:38:09 crc kubenswrapper[4911]: I0929 21:38:09.948373 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqp4t" event={"ID":"48cc0bd9-b2a0-4d13-888f-8abf4f244af8","Type":"ContainerDied","Data":"0f4f0460d1bb7d859627a9e628b935d53198ff8831b26a10578c5cb2828f25ed"} Sep 29 21:38:09 crc kubenswrapper[4911]: I0929 21:38:09.948403 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqp4t" event={"ID":"48cc0bd9-b2a0-4d13-888f-8abf4f244af8","Type":"ContainerStarted","Data":"e96181fd88890996b606a5e888a8887653bd44c005d674fd8040507b25c6bcf7"} Sep 29 21:38:09 crc kubenswrapper[4911]: I0929 21:38:09.967238 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" 
podStartSLOduration=2.825372185 podStartE2EDuration="9.967199679s" podCreationTimestamp="2025-09-29 21:38:00 +0000 UTC" firstStartedPulling="2025-09-29 21:38:02.20850493 +0000 UTC m=+760.185617601" lastFinishedPulling="2025-09-29 21:38:09.350332424 +0000 UTC m=+767.327445095" observedRunningTime="2025-09-29 21:38:09.959746292 +0000 UTC m=+767.936859023" watchObservedRunningTime="2025-09-29 21:38:09.967199679 +0000 UTC m=+767.944312380" Sep 29 21:38:10 crc kubenswrapper[4911]: I0929 21:38:10.961507 4911 generic.go:334] "Generic (PLEG): container finished" podID="cb4339ee-098d-428e-89a8-d57aec12356c" containerID="fff32fd1cc0d308ebf1b2fd40f6c04d929e5d1b17150d86ced22c5edc8f9c9b1" exitCode=0 Sep 29 21:38:10 crc kubenswrapper[4911]: I0929 21:38:10.962031 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-lj8ph" event={"ID":"cb4339ee-098d-428e-89a8-d57aec12356c","Type":"ContainerDied","Data":"fff32fd1cc0d308ebf1b2fd40f6c04d929e5d1b17150d86ced22c5edc8f9c9b1"} Sep 29 21:38:10 crc kubenswrapper[4911]: I0929 21:38:10.968512 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqp4t" event={"ID":"48cc0bd9-b2a0-4d13-888f-8abf4f244af8","Type":"ContainerStarted","Data":"6232f9c1406a5da56d3cb87d1f3f35de23ffda3861b2116d9bb55e901869ef8a"} Sep 29 21:38:11 crc kubenswrapper[4911]: I0929 21:38:11.220862 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-xxmg2" Sep 29 21:38:11 crc kubenswrapper[4911]: I0929 21:38:11.980079 4911 generic.go:334] "Generic (PLEG): container finished" podID="cb4339ee-098d-428e-89a8-d57aec12356c" containerID="ad8551e90cbb146cab62863b090577092245026f5061beba527b9f2a57626919" exitCode=0 Sep 29 21:38:11 crc kubenswrapper[4911]: I0929 21:38:11.980126 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-lj8ph" event={"ID":"cb4339ee-098d-428e-89a8-d57aec12356c","Type":"ContainerDied","Data":"ad8551e90cbb146cab62863b090577092245026f5061beba527b9f2a57626919"} Sep 29 21:38:11 crc kubenswrapper[4911]: I0929 21:38:11.988141 4911 generic.go:334] "Generic (PLEG): container finished" podID="48cc0bd9-b2a0-4d13-888f-8abf4f244af8" containerID="6232f9c1406a5da56d3cb87d1f3f35de23ffda3861b2116d9bb55e901869ef8a" exitCode=0 Sep 29 21:38:11 crc kubenswrapper[4911]: I0929 21:38:11.988218 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqp4t" event={"ID":"48cc0bd9-b2a0-4d13-888f-8abf4f244af8","Type":"ContainerDied","Data":"6232f9c1406a5da56d3cb87d1f3f35de23ffda3861b2116d9bb55e901869ef8a"} Sep 29 21:38:11 crc kubenswrapper[4911]: I0929 21:38:11.988267 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqp4t" event={"ID":"48cc0bd9-b2a0-4d13-888f-8abf4f244af8","Type":"ContainerStarted","Data":"2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1"} Sep 29 21:38:12 crc kubenswrapper[4911]: I0929 21:38:12.049868 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bqp4t" podStartSLOduration=4.604598645 podStartE2EDuration="6.049836325s" podCreationTimestamp="2025-09-29 21:38:06 +0000 UTC" firstStartedPulling="2025-09-29 21:38:09.950901561 +0000 UTC m=+767.928014252" lastFinishedPulling="2025-09-29 21:38:11.396139261 +0000 UTC m=+769.373251932" observedRunningTime="2025-09-29 21:38:12.044239717 +0000 UTC m=+770.021352428" watchObservedRunningTime="2025-09-29 
21:38:12.049836325 +0000 UTC m=+770.026949026" Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:12.998018 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-lj8ph" event={"ID":"cb4339ee-098d-428e-89a8-d57aec12356c","Type":"ContainerStarted","Data":"3dfaae9d705e8f50f3c7764833868f6a35598813ae87e9874e2bb2fee4b330fb"} Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:12.998290 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-lj8ph" event={"ID":"cb4339ee-098d-428e-89a8-d57aec12356c","Type":"ContainerStarted","Data":"5ac8cf7eae44e9c90cc482f6e48687be87580014290620b95f75fbe3b69abc56"} Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:12.998304 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-lj8ph" event={"ID":"cb4339ee-098d-428e-89a8-d57aec12356c","Type":"ContainerStarted","Data":"64bbe45a9b2518efe90ff0808ef5577d231c46a16bd0f175f2db1b5308f064b7"} Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:12.998316 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-lj8ph" event={"ID":"cb4339ee-098d-428e-89a8-d57aec12356c","Type":"ContainerStarted","Data":"5b96623f5169859946403404afbe2432aa08a1fdd9cbac9e683a1f872ff69d09"} Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:12.998327 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-lj8ph" event={"ID":"cb4339ee-098d-428e-89a8-d57aec12356c","Type":"ContainerStarted","Data":"c0e5a2634b08b076c4d47015b8b9e0df9387e709844cc315ba03dc4013a78ff8"} Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.642999 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zgw75"] Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.645178 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.660403 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zgw75"] Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.801484 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6585a44e-cada-4f08-83f7-4e13bd80bf53-utilities\") pod \"certified-operators-zgw75\" (UID: \"6585a44e-cada-4f08-83f7-4e13bd80bf53\") " pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.801826 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6585a44e-cada-4f08-83f7-4e13bd80bf53-catalog-content\") pod \"certified-operators-zgw75\" (UID: \"6585a44e-cada-4f08-83f7-4e13bd80bf53\") " pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.801847 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5fzt\" (UniqueName: \"kubernetes.io/projected/6585a44e-cada-4f08-83f7-4e13bd80bf53-kube-api-access-x5fzt\") pod \"certified-operators-zgw75\" (UID: \"6585a44e-cada-4f08-83f7-4e13bd80bf53\") " pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.902950 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6585a44e-cada-4f08-83f7-4e13bd80bf53-utilities\") pod \"certified-operators-zgw75\" (UID: \"6585a44e-cada-4f08-83f7-4e13bd80bf53\") " pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.903038 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6585a44e-cada-4f08-83f7-4e13bd80bf53-catalog-content\") pod \"certified-operators-zgw75\" (UID: \"6585a44e-cada-4f08-83f7-4e13bd80bf53\") " pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.903058 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5fzt\" (UniqueName: \"kubernetes.io/projected/6585a44e-cada-4f08-83f7-4e13bd80bf53-kube-api-access-x5fzt\") pod \"certified-operators-zgw75\" (UID: \"6585a44e-cada-4f08-83f7-4e13bd80bf53\") " pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.903771 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6585a44e-cada-4f08-83f7-4e13bd80bf53-utilities\") pod \"certified-operators-zgw75\" (UID: \"6585a44e-cada-4f08-83f7-4e13bd80bf53\") " pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.903839 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6585a44e-cada-4f08-83f7-4e13bd80bf53-catalog-content\") pod \"certified-operators-zgw75\" (UID: \"6585a44e-cada-4f08-83f7-4e13bd80bf53\") " pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.927724 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-x5fzt\" (UniqueName: \"kubernetes.io/projected/6585a44e-cada-4f08-83f7-4e13bd80bf53-kube-api-access-x5fzt\") pod \"certified-operators-zgw75\" (UID: \"6585a44e-cada-4f08-83f7-4e13bd80bf53\") " pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:13 crc kubenswrapper[4911]: I0929 21:38:13.983272 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:14 crc kubenswrapper[4911]: I0929 21:38:14.009371 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-lj8ph" event={"ID":"cb4339ee-098d-428e-89a8-d57aec12356c","Type":"ContainerStarted","Data":"bf3122e49abd43a2e20a0683d4c76b22ee99639bf7319e20e8894968c254d24d"} Sep 29 21:38:14 crc kubenswrapper[4911]: I0929 21:38:14.010276 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:14 crc kubenswrapper[4911]: I0929 21:38:14.041129 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-lj8ph" podStartSLOduration=5.935405627 podStartE2EDuration="14.04111498s" podCreationTimestamp="2025-09-29 21:38:00 +0000 UTC" firstStartedPulling="2025-09-29 21:38:01.223557872 +0000 UTC m=+759.200670543" lastFinishedPulling="2025-09-29 21:38:09.329267225 +0000 UTC m=+767.306379896" observedRunningTime="2025-09-29 21:38:14.039097915 +0000 UTC m=+772.016210596" watchObservedRunningTime="2025-09-29 21:38:14.04111498 +0000 UTC m=+772.018227651" Sep 29 21:38:14 crc kubenswrapper[4911]: I0929 21:38:14.500243 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zgw75"] Sep 29 21:38:14 crc kubenswrapper[4911]: W0929 21:38:14.503662 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6585a44e_cada_4f08_83f7_4e13bd80bf53.slice/crio-a625fbb6dfcc49b3a26c72e72e9d651979f49dffb4b351591e9d83e1018f7f0e WatchSource:0}: Error finding container a625fbb6dfcc49b3a26c72e72e9d651979f49dffb4b351591e9d83e1018f7f0e: Status 404 returned error can't find the container with id a625fbb6dfcc49b3a26c72e72e9d651979f49dffb4b351591e9d83e1018f7f0e Sep 29 21:38:15 crc kubenswrapper[4911]: I0929 21:38:15.021515 4911 generic.go:334] "Generic (PLEG): container finished" podID="6585a44e-cada-4f08-83f7-4e13bd80bf53" containerID="a7d1082b999f00e49a62d29f026b7dc0d550e161b7f5b727b311297dc4e5468f" exitCode=0 Sep 29 21:38:15 crc kubenswrapper[4911]: I0929 21:38:15.021625 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgw75" event={"ID":"6585a44e-cada-4f08-83f7-4e13bd80bf53","Type":"ContainerDied","Data":"a7d1082b999f00e49a62d29f026b7dc0d550e161b7f5b727b311297dc4e5468f"} Sep 29 21:38:15 crc kubenswrapper[4911]: I0929 21:38:15.022282 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgw75" event={"ID":"6585a44e-cada-4f08-83f7-4e13bd80bf53","Type":"ContainerStarted","Data":"a625fbb6dfcc49b3a26c72e72e9d651979f49dffb4b351591e9d83e1018f7f0e"} Sep 29 21:38:16 crc kubenswrapper[4911]: I0929 21:38:16.067195 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-lj8ph" Sep 29 21:38:16 crc kubenswrapper[4911]: I0929 21:38:16.123767 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-lj8ph" Sep 29 
21:38:16 crc kubenswrapper[4911]: I0929 21:38:16.582410 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:16 crc kubenswrapper[4911]: I0929 21:38:16.582759 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:16 crc kubenswrapper[4911]: I0929 21:38:16.638647 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:17 crc kubenswrapper[4911]: I0929 21:38:17.041545 4911 generic.go:334] "Generic (PLEG): container finished" podID="6585a44e-cada-4f08-83f7-4e13bd80bf53" containerID="4d4707b8de6cc6fb5d2e07544607b386de0b82670fb0688322645a6de98ee99f" exitCode=0 Sep 29 21:38:17 crc kubenswrapper[4911]: I0929 21:38:17.041644 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgw75" event={"ID":"6585a44e-cada-4f08-83f7-4e13bd80bf53","Type":"ContainerDied","Data":"4d4707b8de6cc6fb5d2e07544607b386de0b82670fb0688322645a6de98ee99f"} Sep 29 21:38:17 crc kubenswrapper[4911]: I0929 21:38:17.121964 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:18 crc kubenswrapper[4911]: I0929 21:38:18.420785 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqp4t"] Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.061427 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgw75" event={"ID":"6585a44e-cada-4f08-83f7-4e13bd80bf53","Type":"ContainerStarted","Data":"294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55"} Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.061624 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bqp4t" podUID="48cc0bd9-b2a0-4d13-888f-8abf4f244af8" containerName="registry-server" containerID="cri-o://2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1" gracePeriod=2 Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.088526 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zgw75" podStartSLOduration=2.602013592 podStartE2EDuration="6.088499903s" podCreationTimestamp="2025-09-29 21:38:13 +0000 UTC" firstStartedPulling="2025-09-29 21:38:15.024526058 +0000 UTC m=+773.001638769" lastFinishedPulling="2025-09-29 21:38:18.511012369 +0000 UTC m=+776.488125080" observedRunningTime="2025-09-29 21:38:19.08618546 +0000 UTC m=+777.063298161" watchObservedRunningTime="2025-09-29 21:38:19.088499903 +0000 UTC m=+777.065612614" Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.646218 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.794184 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tq9jx\" (UniqueName: \"kubernetes.io/projected/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-kube-api-access-tq9jx\") pod \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\" (UID: \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\") " Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.794310 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-utilities\") pod \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\" (UID: \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\") " Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.794391 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-catalog-content\") pod \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\" (UID: \"48cc0bd9-b2a0-4d13-888f-8abf4f244af8\") " Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.797183 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-utilities" (OuterVolumeSpecName: "utilities") pod "48cc0bd9-b2a0-4d13-888f-8abf4f244af8" (UID: "48cc0bd9-b2a0-4d13-888f-8abf4f244af8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.800059 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.804775 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-kube-api-access-tq9jx" (OuterVolumeSpecName: "kube-api-access-tq9jx") pod "48cc0bd9-b2a0-4d13-888f-8abf4f244af8" (UID: "48cc0bd9-b2a0-4d13-888f-8abf4f244af8"). InnerVolumeSpecName "kube-api-access-tq9jx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.812970 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48cc0bd9-b2a0-4d13-888f-8abf4f244af8" (UID: "48cc0bd9-b2a0-4d13-888f-8abf4f244af8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.901828 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tq9jx\" (UniqueName: \"kubernetes.io/projected/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-kube-api-access-tq9jx\") on node \"crc\" DevicePath \"\"" Sep 29 21:38:19 crc kubenswrapper[4911]: I0929 21:38:19.901870 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48cc0bd9-b2a0-4d13-888f-8abf4f244af8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.082984 4911 generic.go:334] "Generic (PLEG): container finished" podID="48cc0bd9-b2a0-4d13-888f-8abf4f244af8" containerID="2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1" exitCode=0 Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.083087 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqp4t" event={"ID":"48cc0bd9-b2a0-4d13-888f-8abf4f244af8","Type":"ContainerDied","Data":"2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1"} Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.083125 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqp4t" Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.083158 4911 scope.go:117] "RemoveContainer" containerID="2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1" Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.083141 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqp4t" event={"ID":"48cc0bd9-b2a0-4d13-888f-8abf4f244af8","Type":"ContainerDied","Data":"e96181fd88890996b606a5e888a8887653bd44c005d674fd8040507b25c6bcf7"} Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.107383 4911 scope.go:117] "RemoveContainer" containerID="6232f9c1406a5da56d3cb87d1f3f35de23ffda3861b2116d9bb55e901869ef8a" Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.112606 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqp4t"] Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.115411 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqp4t"] Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.139785 4911 scope.go:117] "RemoveContainer" containerID="0f4f0460d1bb7d859627a9e628b935d53198ff8831b26a10578c5cb2828f25ed" Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.156744 4911 scope.go:117] "RemoveContainer" containerID="2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1" Sep 29 21:38:20 crc kubenswrapper[4911]: E0929 21:38:20.157269 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1\": container with ID starting with 2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1 not found: ID does not exist" containerID="2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1" Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.157302 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1"} err="failed to get container status 
\"2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1\": rpc error: code = NotFound desc = could not find container \"2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1\": container with ID starting with 2623cfbdaf2efec7725cca57e1c584c230b6c240479d3e7d25341cea8b1756b1 not found: ID does not exist" Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.157327 4911 scope.go:117] "RemoveContainer" containerID="6232f9c1406a5da56d3cb87d1f3f35de23ffda3861b2116d9bb55e901869ef8a" Sep 29 21:38:20 crc kubenswrapper[4911]: E0929 21:38:20.157648 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6232f9c1406a5da56d3cb87d1f3f35de23ffda3861b2116d9bb55e901869ef8a\": container with ID starting with 6232f9c1406a5da56d3cb87d1f3f35de23ffda3861b2116d9bb55e901869ef8a not found: ID does not exist" containerID="6232f9c1406a5da56d3cb87d1f3f35de23ffda3861b2116d9bb55e901869ef8a" Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.157728 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6232f9c1406a5da56d3cb87d1f3f35de23ffda3861b2116d9bb55e901869ef8a"} err="failed to get container status \"6232f9c1406a5da56d3cb87d1f3f35de23ffda3861b2116d9bb55e901869ef8a\": rpc error: code = NotFound desc = could not find container \"6232f9c1406a5da56d3cb87d1f3f35de23ffda3861b2116d9bb55e901869ef8a\": container with ID starting with 6232f9c1406a5da56d3cb87d1f3f35de23ffda3861b2116d9bb55e901869ef8a not found: ID does not exist" Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.157801 4911 scope.go:117] "RemoveContainer" containerID="0f4f0460d1bb7d859627a9e628b935d53198ff8831b26a10578c5cb2828f25ed" Sep 29 21:38:20 crc kubenswrapper[4911]: E0929 21:38:20.158190 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f4f0460d1bb7d859627a9e628b935d53198ff8831b26a10578c5cb2828f25ed\": container with ID starting with 0f4f0460d1bb7d859627a9e628b935d53198ff8831b26a10578c5cb2828f25ed not found: ID does not exist" containerID="0f4f0460d1bb7d859627a9e628b935d53198ff8831b26a10578c5cb2828f25ed" Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.158232 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f4f0460d1bb7d859627a9e628b935d53198ff8831b26a10578c5cb2828f25ed"} err="failed to get container status \"0f4f0460d1bb7d859627a9e628b935d53198ff8831b26a10578c5cb2828f25ed\": rpc error: code = NotFound desc = could not find container \"0f4f0460d1bb7d859627a9e628b935d53198ff8831b26a10578c5cb2828f25ed\": container with ID starting with 0f4f0460d1bb7d859627a9e628b935d53198ff8831b26a10578c5cb2828f25ed not found: ID does not exist" Sep 29 21:38:20 crc kubenswrapper[4911]: I0929 21:38:20.713819 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48cc0bd9-b2a0-4d13-888f-8abf4f244af8" path="/var/lib/kubelet/pods/48cc0bd9-b2a0-4d13-888f-8abf4f244af8/volumes" Sep 29 21:38:21 crc kubenswrapper[4911]: I0929 21:38:21.690952 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-5ztnx" Sep 29 21:38:22 crc kubenswrapper[4911]: I0929 21:38:22.715079 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-jxhnw" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.579199 4911 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/community-operators-5qnxf"] Sep 29 21:38:23 crc kubenswrapper[4911]: E0929 21:38:23.579723 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48cc0bd9-b2a0-4d13-888f-8abf4f244af8" containerName="registry-server" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.579743 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="48cc0bd9-b2a0-4d13-888f-8abf4f244af8" containerName="registry-server" Sep 29 21:38:23 crc kubenswrapper[4911]: E0929 21:38:23.579760 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48cc0bd9-b2a0-4d13-888f-8abf4f244af8" containerName="extract-content" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.579770 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="48cc0bd9-b2a0-4d13-888f-8abf4f244af8" containerName="extract-content" Sep 29 21:38:23 crc kubenswrapper[4911]: E0929 21:38:23.579811 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48cc0bd9-b2a0-4d13-888f-8abf4f244af8" containerName="extract-utilities" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.579821 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="48cc0bd9-b2a0-4d13-888f-8abf4f244af8" containerName="extract-utilities" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.579971 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="48cc0bd9-b2a0-4d13-888f-8abf4f244af8" containerName="registry-server" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.580982 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5qnxf" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.604407 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5qnxf"] Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.766490 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2638699d-3494-4a19-ac0b-2373e0d1943f-catalog-content\") pod \"community-operators-5qnxf\" (UID: \"2638699d-3494-4a19-ac0b-2373e0d1943f\") " pod="openshift-marketplace/community-operators-5qnxf" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.766658 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2638699d-3494-4a19-ac0b-2373e0d1943f-utilities\") pod \"community-operators-5qnxf\" (UID: \"2638699d-3494-4a19-ac0b-2373e0d1943f\") " pod="openshift-marketplace/community-operators-5qnxf" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.766696 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bd6r\" (UniqueName: \"kubernetes.io/projected/2638699d-3494-4a19-ac0b-2373e0d1943f-kube-api-access-7bd6r\") pod \"community-operators-5qnxf\" (UID: \"2638699d-3494-4a19-ac0b-2373e0d1943f\") " pod="openshift-marketplace/community-operators-5qnxf" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.867371 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2638699d-3494-4a19-ac0b-2373e0d1943f-utilities\") pod \"community-operators-5qnxf\" (UID: \"2638699d-3494-4a19-ac0b-2373e0d1943f\") " pod="openshift-marketplace/community-operators-5qnxf" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.867410 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-7bd6r\" (UniqueName: \"kubernetes.io/projected/2638699d-3494-4a19-ac0b-2373e0d1943f-kube-api-access-7bd6r\") pod \"community-operators-5qnxf\" (UID: \"2638699d-3494-4a19-ac0b-2373e0d1943f\") " pod="openshift-marketplace/community-operators-5qnxf" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.867482 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2638699d-3494-4a19-ac0b-2373e0d1943f-catalog-content\") pod \"community-operators-5qnxf\" (UID: \"2638699d-3494-4a19-ac0b-2373e0d1943f\") " pod="openshift-marketplace/community-operators-5qnxf" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.868065 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2638699d-3494-4a19-ac0b-2373e0d1943f-catalog-content\") pod \"community-operators-5qnxf\" (UID: \"2638699d-3494-4a19-ac0b-2373e0d1943f\") " pod="openshift-marketplace/community-operators-5qnxf" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.868099 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2638699d-3494-4a19-ac0b-2373e0d1943f-utilities\") pod \"community-operators-5qnxf\" (UID: \"2638699d-3494-4a19-ac0b-2373e0d1943f\") " pod="openshift-marketplace/community-operators-5qnxf" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.890070 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bd6r\" (UniqueName: \"kubernetes.io/projected/2638699d-3494-4a19-ac0b-2373e0d1943f-kube-api-access-7bd6r\") pod \"community-operators-5qnxf\" (UID: \"2638699d-3494-4a19-ac0b-2373e0d1943f\") " pod="openshift-marketplace/community-operators-5qnxf" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.913091 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5qnxf" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.983770 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:23 crc kubenswrapper[4911]: I0929 21:38:23.984031 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:24 crc kubenswrapper[4911]: I0929 21:38:24.052723 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:24 crc kubenswrapper[4911]: I0929 21:38:24.186367 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zgw75" Sep 29 21:38:24 crc kubenswrapper[4911]: I0929 21:38:24.458231 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5qnxf"] Sep 29 21:38:24 crc kubenswrapper[4911]: W0929 21:38:24.464176 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2638699d_3494_4a19_ac0b_2373e0d1943f.slice/crio-76b373b6eb5676b085d89682ebeb052ceaabc1c43b8a4b5c4f0e8b4752335e9d WatchSource:0}: Error finding container 76b373b6eb5676b085d89682ebeb052ceaabc1c43b8a4b5c4f0e8b4752335e9d: Status 404 returned error can't find the container with id 76b373b6eb5676b085d89682ebeb052ceaabc1c43b8a4b5c4f0e8b4752335e9d Sep 29 21:38:25 crc kubenswrapper[4911]: I0929 21:38:25.131902 4911 generic.go:334] "Generic (PLEG): container finished" podID="2638699d-3494-4a19-ac0b-2373e0d1943f" containerID="acda686f86ccb16156612811325f8b355c6fe7a6898e27995b70c17f67d53619" exitCode=0 Sep 29 21:38:25 crc kubenswrapper[4911]: I0929 21:38:25.132042 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5qnxf" event={"ID":"2638699d-3494-4a19-ac0b-2373e0d1943f","Type":"ContainerDied","Data":"acda686f86ccb16156612811325f8b355c6fe7a6898e27995b70c17f67d53619"} Sep 29 21:38:25 crc kubenswrapper[4911]: I0929 21:38:25.132541 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5qnxf" event={"ID":"2638699d-3494-4a19-ac0b-2373e0d1943f","Type":"ContainerStarted","Data":"76b373b6eb5676b085d89682ebeb052ceaabc1c43b8a4b5c4f0e8b4752335e9d"} Sep 29 21:38:26 crc kubenswrapper[4911]: I0929 21:38:26.141835 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5qnxf" event={"ID":"2638699d-3494-4a19-ac0b-2373e0d1943f","Type":"ContainerStarted","Data":"885c338b49434746dd368e9ca841076d023b8f64928eb2409b81782d27205559"} Sep 29 21:38:26 crc kubenswrapper[4911]: I0929 21:38:26.619367 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zgw75"] Sep 29 21:38:27 crc kubenswrapper[4911]: I0929 21:38:27.158441 4911 generic.go:334] "Generic (PLEG): container finished" podID="2638699d-3494-4a19-ac0b-2373e0d1943f" containerID="885c338b49434746dd368e9ca841076d023b8f64928eb2409b81782d27205559" exitCode=0 Sep 29 21:38:27 crc kubenswrapper[4911]: I0929 21:38:27.158553 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5qnxf" event={"ID":"2638699d-3494-4a19-ac0b-2373e0d1943f","Type":"ContainerDied","Data":"885c338b49434746dd368e9ca841076d023b8f64928eb2409b81782d27205559"} Sep 29 21:38:27 crc 
kubenswrapper[4911]: I0929 21:38:27.158878 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zgw75" podUID="6585a44e-cada-4f08-83f7-4e13bd80bf53" containerName="registry-server" containerID="cri-o://294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55" gracePeriod=2
Sep 29 21:38:27 crc kubenswrapper[4911]: I0929 21:38:27.684478 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zgw75"
Sep 29 21:38:27 crc kubenswrapper[4911]: I0929 21:38:27.829185 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5fzt\" (UniqueName: \"kubernetes.io/projected/6585a44e-cada-4f08-83f7-4e13bd80bf53-kube-api-access-x5fzt\") pod \"6585a44e-cada-4f08-83f7-4e13bd80bf53\" (UID: \"6585a44e-cada-4f08-83f7-4e13bd80bf53\") "
Sep 29 21:38:27 crc kubenswrapper[4911]: I0929 21:38:27.829261 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6585a44e-cada-4f08-83f7-4e13bd80bf53-catalog-content\") pod \"6585a44e-cada-4f08-83f7-4e13bd80bf53\" (UID: \"6585a44e-cada-4f08-83f7-4e13bd80bf53\") "
Sep 29 21:38:27 crc kubenswrapper[4911]: I0929 21:38:27.829295 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6585a44e-cada-4f08-83f7-4e13bd80bf53-utilities\") pod \"6585a44e-cada-4f08-83f7-4e13bd80bf53\" (UID: \"6585a44e-cada-4f08-83f7-4e13bd80bf53\") "
Sep 29 21:38:27 crc kubenswrapper[4911]: I0929 21:38:27.831249 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6585a44e-cada-4f08-83f7-4e13bd80bf53-utilities" (OuterVolumeSpecName: "utilities") pod "6585a44e-cada-4f08-83f7-4e13bd80bf53" (UID: "6585a44e-cada-4f08-83f7-4e13bd80bf53"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:38:27 crc kubenswrapper[4911]: I0929 21:38:27.848529 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6585a44e-cada-4f08-83f7-4e13bd80bf53-kube-api-access-x5fzt" (OuterVolumeSpecName: "kube-api-access-x5fzt") pod "6585a44e-cada-4f08-83f7-4e13bd80bf53" (UID: "6585a44e-cada-4f08-83f7-4e13bd80bf53"). InnerVolumeSpecName "kube-api-access-x5fzt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:38:27 crc kubenswrapper[4911]: I0929 21:38:27.874975 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6585a44e-cada-4f08-83f7-4e13bd80bf53-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6585a44e-cada-4f08-83f7-4e13bd80bf53" (UID: "6585a44e-cada-4f08-83f7-4e13bd80bf53"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:38:27 crc kubenswrapper[4911]: I0929 21:38:27.931231 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6585a44e-cada-4f08-83f7-4e13bd80bf53-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 21:38:27 crc kubenswrapper[4911]: I0929 21:38:27.931281 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6585a44e-cada-4f08-83f7-4e13bd80bf53-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 21:38:27 crc kubenswrapper[4911]: I0929 21:38:27.931296 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5fzt\" (UniqueName: \"kubernetes.io/projected/6585a44e-cada-4f08-83f7-4e13bd80bf53-kube-api-access-x5fzt\") on node \"crc\" DevicePath \"\""
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.165813 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5qnxf" event={"ID":"2638699d-3494-4a19-ac0b-2373e0d1943f","Type":"ContainerStarted","Data":"11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160"}
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.167555 4911 generic.go:334] "Generic (PLEG): container finished" podID="6585a44e-cada-4f08-83f7-4e13bd80bf53" containerID="294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55" exitCode=0
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.167593 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgw75" event={"ID":"6585a44e-cada-4f08-83f7-4e13bd80bf53","Type":"ContainerDied","Data":"294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55"}
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.167602 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zgw75"
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.167616 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgw75" event={"ID":"6585a44e-cada-4f08-83f7-4e13bd80bf53","Type":"ContainerDied","Data":"a625fbb6dfcc49b3a26c72e72e9d651979f49dffb4b351591e9d83e1018f7f0e"}
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.167635 4911 scope.go:117] "RemoveContainer" containerID="294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55"
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.184156 4911 scope.go:117] "RemoveContainer" containerID="4d4707b8de6cc6fb5d2e07544607b386de0b82670fb0688322645a6de98ee99f"
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.188535 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5qnxf" podStartSLOduration=2.6680070860000002 podStartE2EDuration="5.188522252s" podCreationTimestamp="2025-09-29 21:38:23 +0000 UTC" firstStartedPulling="2025-09-29 21:38:25.133732934 +0000 UTC m=+783.110845615" lastFinishedPulling="2025-09-29 21:38:27.6542481 +0000 UTC m=+785.631360781" observedRunningTime="2025-09-29 21:38:28.187323283 +0000 UTC m=+786.164435954" watchObservedRunningTime="2025-09-29 21:38:28.188522252 +0000 UTC m=+786.165634913"
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.208841 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zgw75"]
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.211647 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zgw75"]
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.228738 4911 scope.go:117] "RemoveContainer" containerID="a7d1082b999f00e49a62d29f026b7dc0d550e161b7f5b727b311297dc4e5468f"
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.243369 4911 scope.go:117] "RemoveContainer" containerID="294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55"
Sep 29 21:38:28 crc kubenswrapper[4911]: E0929 21:38:28.243717 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55\": container with ID starting with 294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55 not found: ID does not exist" containerID="294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55"
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.243758 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55"} err="failed to get container status \"294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55\": rpc error: code = NotFound desc = could not find container \"294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55\": container with ID starting with 294d5a750ab46fab5f7b765be02d92869f1e21fda46ba1a3b373055750ac9c55 not found: ID does not exist"
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.243785 4911 scope.go:117] "RemoveContainer" containerID="4d4707b8de6cc6fb5d2e07544607b386de0b82670fb0688322645a6de98ee99f"
Sep 29 21:38:28 crc kubenswrapper[4911]: E0929 21:38:28.244100 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d4707b8de6cc6fb5d2e07544607b386de0b82670fb0688322645a6de98ee99f\": container with ID starting with 4d4707b8de6cc6fb5d2e07544607b386de0b82670fb0688322645a6de98ee99f not found: ID does not exist" containerID="4d4707b8de6cc6fb5d2e07544607b386de0b82670fb0688322645a6de98ee99f"
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.244143 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d4707b8de6cc6fb5d2e07544607b386de0b82670fb0688322645a6de98ee99f"} err="failed to get container status \"4d4707b8de6cc6fb5d2e07544607b386de0b82670fb0688322645a6de98ee99f\": rpc error: code = NotFound desc = could not find container \"4d4707b8de6cc6fb5d2e07544607b386de0b82670fb0688322645a6de98ee99f\": container with ID starting with 4d4707b8de6cc6fb5d2e07544607b386de0b82670fb0688322645a6de98ee99f not found: ID does not exist"
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.244170 4911 scope.go:117] "RemoveContainer" containerID="a7d1082b999f00e49a62d29f026b7dc0d550e161b7f5b727b311297dc4e5468f"
Sep 29 21:38:28 crc kubenswrapper[4911]: E0929 21:38:28.244418 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7d1082b999f00e49a62d29f026b7dc0d550e161b7f5b727b311297dc4e5468f\": container with ID starting with a7d1082b999f00e49a62d29f026b7dc0d550e161b7f5b727b311297dc4e5468f not found: ID does not exist" containerID="a7d1082b999f00e49a62d29f026b7dc0d550e161b7f5b727b311297dc4e5468f"
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.244449 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7d1082b999f00e49a62d29f026b7dc0d550e161b7f5b727b311297dc4e5468f"} err="failed to get container status \"a7d1082b999f00e49a62d29f026b7dc0d550e161b7f5b727b311297dc4e5468f\": rpc error: code = NotFound desc = could not find container \"a7d1082b999f00e49a62d29f026b7dc0d550e161b7f5b727b311297dc4e5468f\": container with ID starting with a7d1082b999f00e49a62d29f026b7dc0d550e161b7f5b727b311297dc4e5468f not found: ID does not exist"
Sep 29 21:38:28 crc kubenswrapper[4911]: I0929 21:38:28.708718 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6585a44e-cada-4f08-83f7-4e13bd80bf53" path="/var/lib/kubelet/pods/6585a44e-cada-4f08-83f7-4e13bd80bf53/volumes"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.633403 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-vcm56"]
Sep 29 21:38:30 crc kubenswrapper[4911]: E0929 21:38:30.634031 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6585a44e-cada-4f08-83f7-4e13bd80bf53" containerName="extract-utilities"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.634050 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6585a44e-cada-4f08-83f7-4e13bd80bf53" containerName="extract-utilities"
Sep 29 21:38:30 crc kubenswrapper[4911]: E0929 21:38:30.634064 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6585a44e-cada-4f08-83f7-4e13bd80bf53" containerName="registry-server"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.634075 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6585a44e-cada-4f08-83f7-4e13bd80bf53" containerName="registry-server"
Sep 29 21:38:30 crc kubenswrapper[4911]: E0929 21:38:30.634098 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6585a44e-cada-4f08-83f7-4e13bd80bf53" containerName="extract-content"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.634108 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6585a44e-cada-4f08-83f7-4e13bd80bf53" containerName="extract-content"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.634258 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6585a44e-cada-4f08-83f7-4e13bd80bf53" containerName="registry-server"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.634743 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-vcm56"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.637018 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.637097 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-x24rm"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.637135 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.641214 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-vcm56"]
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.773278 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2t26d\" (UniqueName: \"kubernetes.io/projected/c1e4af9e-39a5-4de6-9c6f-8a131757c680-kube-api-access-2t26d\") pod \"openstack-operator-index-vcm56\" (UID: \"c1e4af9e-39a5-4de6-9c6f-8a131757c680\") " pod="openstack-operators/openstack-operator-index-vcm56"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.875537 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2t26d\" (UniqueName: \"kubernetes.io/projected/c1e4af9e-39a5-4de6-9c6f-8a131757c680-kube-api-access-2t26d\") pod \"openstack-operator-index-vcm56\" (UID: \"c1e4af9e-39a5-4de6-9c6f-8a131757c680\") " pod="openstack-operators/openstack-operator-index-vcm56"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.904677 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2t26d\" (UniqueName: \"kubernetes.io/projected/c1e4af9e-39a5-4de6-9c6f-8a131757c680-kube-api-access-2t26d\") pod \"openstack-operator-index-vcm56\" (UID: \"c1e4af9e-39a5-4de6-9c6f-8a131757c680\") " pod="openstack-operators/openstack-operator-index-vcm56"
Sep 29 21:38:30 crc kubenswrapper[4911]: I0929 21:38:30.949654 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-vcm56"
Sep 29 21:38:31 crc kubenswrapper[4911]: I0929 21:38:31.073553 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-lj8ph"
Sep 29 21:38:31 crc kubenswrapper[4911]: I0929 21:38:31.479214 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-vcm56"]
Sep 29 21:38:32 crc kubenswrapper[4911]: I0929 21:38:32.202214 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-vcm56" event={"ID":"c1e4af9e-39a5-4de6-9c6f-8a131757c680","Type":"ContainerStarted","Data":"b845a917a90ecec9df6f4ef2ca7dd7b9c28c3fb79410dcdc8a5c472c63f4bfda"}
Sep 29 21:38:33 crc kubenswrapper[4911]: I0929 21:38:33.914050 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5qnxf"
Sep 29 21:38:33 crc kubenswrapper[4911]: I0929 21:38:33.914282 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5qnxf"
Sep 29 21:38:33 crc kubenswrapper[4911]: I0929 21:38:33.970623 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5qnxf"
Sep 29 21:38:34 crc kubenswrapper[4911]: I0929 21:38:34.275238 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5qnxf"
Sep 29 21:38:35 crc kubenswrapper[4911]: I0929 21:38:35.226898 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-vcm56" event={"ID":"c1e4af9e-39a5-4de6-9c6f-8a131757c680","Type":"ContainerStarted","Data":"ee624dd6fd1e7ec6ea881619bae1291ab8d472f4eb5a940c1386909cac1cdefc"}
Sep 29 21:38:35 crc kubenswrapper[4911]: I0929 21:38:35.254930 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-vcm56" podStartSLOduration=2.614187686 podStartE2EDuration="5.254905s" podCreationTimestamp="2025-09-29 21:38:30 +0000 UTC" firstStartedPulling="2025-09-29 21:38:31.453681311 +0000 UTC m=+789.430793992" lastFinishedPulling="2025-09-29 21:38:34.094398635 +0000 UTC m=+792.071511306" observedRunningTime="2025-09-29 21:38:35.252111771 +0000 UTC m=+793.229224472" watchObservedRunningTime="2025-09-29 21:38:35.254905 +0000 UTC m=+793.232017701"
Sep 29 21:38:35 crc kubenswrapper[4911]: I0929 21:38:35.625150 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5qnxf"]
Sep 29 21:38:37 crc kubenswrapper[4911]: I0929 21:38:37.242188 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5qnxf" podUID="2638699d-3494-4a19-ac0b-2373e0d1943f" containerName="registry-server" containerID="cri-o://11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160" gracePeriod=2
Sep 29 21:38:37 crc kubenswrapper[4911]: I0929 21:38:37.684421 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5qnxf"
Sep 29 21:38:37 crc kubenswrapper[4911]: I0929 21:38:37.787866 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2638699d-3494-4a19-ac0b-2373e0d1943f-catalog-content\") pod \"2638699d-3494-4a19-ac0b-2373e0d1943f\" (UID: \"2638699d-3494-4a19-ac0b-2373e0d1943f\") "
Sep 29 21:38:37 crc kubenswrapper[4911]: I0929 21:38:37.788037 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bd6r\" (UniqueName: \"kubernetes.io/projected/2638699d-3494-4a19-ac0b-2373e0d1943f-kube-api-access-7bd6r\") pod \"2638699d-3494-4a19-ac0b-2373e0d1943f\" (UID: \"2638699d-3494-4a19-ac0b-2373e0d1943f\") "
Sep 29 21:38:37 crc kubenswrapper[4911]: I0929 21:38:37.788089 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2638699d-3494-4a19-ac0b-2373e0d1943f-utilities\") pod \"2638699d-3494-4a19-ac0b-2373e0d1943f\" (UID: \"2638699d-3494-4a19-ac0b-2373e0d1943f\") "
Sep 29 21:38:37 crc kubenswrapper[4911]: I0929 21:38:37.789667 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2638699d-3494-4a19-ac0b-2373e0d1943f-utilities" (OuterVolumeSpecName: "utilities") pod "2638699d-3494-4a19-ac0b-2373e0d1943f" (UID: "2638699d-3494-4a19-ac0b-2373e0d1943f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:38:37 crc kubenswrapper[4911]: I0929 21:38:37.797017 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2638699d-3494-4a19-ac0b-2373e0d1943f-kube-api-access-7bd6r" (OuterVolumeSpecName: "kube-api-access-7bd6r") pod "2638699d-3494-4a19-ac0b-2373e0d1943f" (UID: "2638699d-3494-4a19-ac0b-2373e0d1943f"). InnerVolumeSpecName "kube-api-access-7bd6r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:38:37 crc kubenswrapper[4911]: I0929 21:38:37.879211 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2638699d-3494-4a19-ac0b-2373e0d1943f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2638699d-3494-4a19-ac0b-2373e0d1943f" (UID: "2638699d-3494-4a19-ac0b-2373e0d1943f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:38:37 crc kubenswrapper[4911]: I0929 21:38:37.890556 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2638699d-3494-4a19-ac0b-2373e0d1943f-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 21:38:37 crc kubenswrapper[4911]: I0929 21:38:37.890607 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bd6r\" (UniqueName: \"kubernetes.io/projected/2638699d-3494-4a19-ac0b-2373e0d1943f-kube-api-access-7bd6r\") on node \"crc\" DevicePath \"\""
Sep 29 21:38:37 crc kubenswrapper[4911]: I0929 21:38:37.890632 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2638699d-3494-4a19-ac0b-2373e0d1943f-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.252670 4911 generic.go:334] "Generic (PLEG): container finished" podID="2638699d-3494-4a19-ac0b-2373e0d1943f" containerID="11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160" exitCode=0
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.252732 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5qnxf"
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.252753 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5qnxf" event={"ID":"2638699d-3494-4a19-ac0b-2373e0d1943f","Type":"ContainerDied","Data":"11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160"}
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.252877 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5qnxf" event={"ID":"2638699d-3494-4a19-ac0b-2373e0d1943f","Type":"ContainerDied","Data":"76b373b6eb5676b085d89682ebeb052ceaabc1c43b8a4b5c4f0e8b4752335e9d"}
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.252919 4911 scope.go:117] "RemoveContainer" containerID="11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160"
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.274554 4911 scope.go:117] "RemoveContainer" containerID="885c338b49434746dd368e9ca841076d023b8f64928eb2409b81782d27205559"
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.294003 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5qnxf"]
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.308246 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5qnxf"]
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.335277 4911 scope.go:117] "RemoveContainer" containerID="acda686f86ccb16156612811325f8b355c6fe7a6898e27995b70c17f67d53619"
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.357063 4911 scope.go:117] "RemoveContainer" containerID="11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160"
Sep 29 21:38:38 crc kubenswrapper[4911]: E0929 21:38:38.357515 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160\": container with ID starting with 11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160 not found: ID does not exist" containerID="11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160"
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.357565 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160"} err="failed to get container status \"11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160\": rpc error: code = NotFound desc = could not find container \"11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160\": container with ID starting with 11d4d7dd597a524735c597b9d271c7b039264f6d3979794e11cc5aeadd923160 not found: ID does not exist"
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.357600 4911 scope.go:117] "RemoveContainer" containerID="885c338b49434746dd368e9ca841076d023b8f64928eb2409b81782d27205559"
Sep 29 21:38:38 crc kubenswrapper[4911]: E0929 21:38:38.358147 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"885c338b49434746dd368e9ca841076d023b8f64928eb2409b81782d27205559\": container with ID starting with 885c338b49434746dd368e9ca841076d023b8f64928eb2409b81782d27205559 not found: ID does not exist" containerID="885c338b49434746dd368e9ca841076d023b8f64928eb2409b81782d27205559"
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.358183 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"885c338b49434746dd368e9ca841076d023b8f64928eb2409b81782d27205559"} err="failed to get container status \"885c338b49434746dd368e9ca841076d023b8f64928eb2409b81782d27205559\": rpc error: code = NotFound desc = could not find container \"885c338b49434746dd368e9ca841076d023b8f64928eb2409b81782d27205559\": container with ID starting with 885c338b49434746dd368e9ca841076d023b8f64928eb2409b81782d27205559 not found: ID does not exist"
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.358208 4911 scope.go:117] "RemoveContainer" containerID="acda686f86ccb16156612811325f8b355c6fe7a6898e27995b70c17f67d53619"
Sep 29 21:38:38 crc kubenswrapper[4911]: E0929 21:38:38.358544 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acda686f86ccb16156612811325f8b355c6fe7a6898e27995b70c17f67d53619\": container with ID starting with acda686f86ccb16156612811325f8b355c6fe7a6898e27995b70c17f67d53619 not found: ID does not exist" containerID="acda686f86ccb16156612811325f8b355c6fe7a6898e27995b70c17f67d53619"
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.358710 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acda686f86ccb16156612811325f8b355c6fe7a6898e27995b70c17f67d53619"} err="failed to get container status \"acda686f86ccb16156612811325f8b355c6fe7a6898e27995b70c17f67d53619\": rpc error: code = NotFound desc = could not find container \"acda686f86ccb16156612811325f8b355c6fe7a6898e27995b70c17f67d53619\": container with ID starting with acda686f86ccb16156612811325f8b355c6fe7a6898e27995b70c17f67d53619 not found: ID does not exist"
Sep 29 21:38:38 crc kubenswrapper[4911]: I0929 21:38:38.716289 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2638699d-3494-4a19-ac0b-2373e0d1943f" path="/var/lib/kubelet/pods/2638699d-3494-4a19-ac0b-2373e0d1943f/volumes"
Sep 29 21:38:40 crc kubenswrapper[4911]: I0929 21:38:40.951143 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-vcm56"
Sep 29 21:38:40 crc kubenswrapper[4911]: I0929 21:38:40.951233 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-vcm56"
Sep 29 21:38:41 crc kubenswrapper[4911]: I0929 21:38:41.006681 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-vcm56"
Sep 29 21:38:41 crc kubenswrapper[4911]: I0929 21:38:41.309748 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-vcm56"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.284378 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"]
Sep 29 21:38:44 crc kubenswrapper[4911]: E0929 21:38:44.285119 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2638699d-3494-4a19-ac0b-2373e0d1943f" containerName="extract-content"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.285141 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2638699d-3494-4a19-ac0b-2373e0d1943f" containerName="extract-content"
Sep 29 21:38:44 crc kubenswrapper[4911]: E0929 21:38:44.285161 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2638699d-3494-4a19-ac0b-2373e0d1943f" containerName="registry-server"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.285174 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2638699d-3494-4a19-ac0b-2373e0d1943f" containerName="registry-server"
Sep 29 21:38:44 crc kubenswrapper[4911]: E0929 21:38:44.285197 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2638699d-3494-4a19-ac0b-2373e0d1943f" containerName="extract-utilities"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.285210 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2638699d-3494-4a19-ac0b-2373e0d1943f" containerName="extract-utilities"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.285396 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="2638699d-3494-4a19-ac0b-2373e0d1943f" containerName="registry-server"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.286900 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.297398 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"]
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.297490 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-2wfrk"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.384911 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5sk6\" (UniqueName: \"kubernetes.io/projected/fba66f38-d6ca-4922-a098-30d733edfdc1-kube-api-access-m5sk6\") pod \"0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789\" (UID: \"fba66f38-d6ca-4922-a098-30d733edfdc1\") " pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.385027 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fba66f38-d6ca-4922-a098-30d733edfdc1-util\") pod \"0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789\" (UID: \"fba66f38-d6ca-4922-a098-30d733edfdc1\") " pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.385285 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fba66f38-d6ca-4922-a098-30d733edfdc1-bundle\") pod \"0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789\" (UID: \"fba66f38-d6ca-4922-a098-30d733edfdc1\") " pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.486877 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5sk6\" (UniqueName: \"kubernetes.io/projected/fba66f38-d6ca-4922-a098-30d733edfdc1-kube-api-access-m5sk6\") pod \"0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789\" (UID: \"fba66f38-d6ca-4922-a098-30d733edfdc1\") " pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.486972 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fba66f38-d6ca-4922-a098-30d733edfdc1-util\") pod \"0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789\" (UID: \"fba66f38-d6ca-4922-a098-30d733edfdc1\") " pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.487092 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fba66f38-d6ca-4922-a098-30d733edfdc1-bundle\") pod \"0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789\" (UID: \"fba66f38-d6ca-4922-a098-30d733edfdc1\") " pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.487595 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fba66f38-d6ca-4922-a098-30d733edfdc1-util\") pod \"0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789\" (UID: \"fba66f38-d6ca-4922-a098-30d733edfdc1\") " pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.487678 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fba66f38-d6ca-4922-a098-30d733edfdc1-bundle\") pod \"0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789\" (UID: \"fba66f38-d6ca-4922-a098-30d733edfdc1\") " pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.529691 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5sk6\" (UniqueName: \"kubernetes.io/projected/fba66f38-d6ca-4922-a098-30d733edfdc1-kube-api-access-m5sk6\") pod \"0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789\" (UID: \"fba66f38-d6ca-4922-a098-30d733edfdc1\") " pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:44 crc kubenswrapper[4911]: I0929 21:38:44.608941 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.063310 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"]
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.305663 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789" event={"ID":"fba66f38-d6ca-4922-a098-30d733edfdc1","Type":"ContainerStarted","Data":"d74d50a3ab52269e03d46daddb9cbf31c9b16b20b259e2bc00a93b52c8c07c8e"}
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.305715 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789" event={"ID":"fba66f38-d6ca-4922-a098-30d733edfdc1","Type":"ContainerStarted","Data":"fd3500024ad3fa1823946297453407bdc9484bd00c01544407f0e20a6d267aa5"}
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.429714 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m8zzr"]
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.431755 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.442581 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m8zzr"]
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.525354 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gt8n\" (UniqueName: \"kubernetes.io/projected/201b73b2-db5b-4496-81fa-7d3c87acbf2f-kube-api-access-4gt8n\") pod \"redhat-operators-m8zzr\" (UID: \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\") " pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.525467 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201b73b2-db5b-4496-81fa-7d3c87acbf2f-utilities\") pod \"redhat-operators-m8zzr\" (UID: \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\") " pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.525555 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201b73b2-db5b-4496-81fa-7d3c87acbf2f-catalog-content\") pod \"redhat-operators-m8zzr\" (UID: \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\") " pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.627081 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gt8n\" (UniqueName: \"kubernetes.io/projected/201b73b2-db5b-4496-81fa-7d3c87acbf2f-kube-api-access-4gt8n\") pod \"redhat-operators-m8zzr\" (UID: \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\") " pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.627201 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201b73b2-db5b-4496-81fa-7d3c87acbf2f-utilities\") pod \"redhat-operators-m8zzr\" (UID: \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\") " pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.627728 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201b73b2-db5b-4496-81fa-7d3c87acbf2f-utilities\") pod \"redhat-operators-m8zzr\" (UID: \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\") " pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.627911 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201b73b2-db5b-4496-81fa-7d3c87acbf2f-catalog-content\") pod \"redhat-operators-m8zzr\" (UID: \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\") " pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.628222 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201b73b2-db5b-4496-81fa-7d3c87acbf2f-catalog-content\") pod \"redhat-operators-m8zzr\" (UID: \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\") " pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.656880 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gt8n\" (UniqueName: \"kubernetes.io/projected/201b73b2-db5b-4496-81fa-7d3c87acbf2f-kube-api-access-4gt8n\") pod \"redhat-operators-m8zzr\" (UID: \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\") " pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:45 crc kubenswrapper[4911]: I0929 21:38:45.762844 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:46 crc kubenswrapper[4911]: I0929 21:38:46.058573 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m8zzr"]
Sep 29 21:38:46 crc kubenswrapper[4911]: I0929 21:38:46.313023 4911 generic.go:334] "Generic (PLEG): container finished" podID="fba66f38-d6ca-4922-a098-30d733edfdc1" containerID="d74d50a3ab52269e03d46daddb9cbf31c9b16b20b259e2bc00a93b52c8c07c8e" exitCode=0
Sep 29 21:38:46 crc kubenswrapper[4911]: I0929 21:38:46.313130 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789" event={"ID":"fba66f38-d6ca-4922-a098-30d733edfdc1","Type":"ContainerDied","Data":"d74d50a3ab52269e03d46daddb9cbf31c9b16b20b259e2bc00a93b52c8c07c8e"}
Sep 29 21:38:46 crc kubenswrapper[4911]: I0929 21:38:46.316123 4911 generic.go:334] "Generic (PLEG): container finished" podID="201b73b2-db5b-4496-81fa-7d3c87acbf2f" containerID="5a7cd834ac5d7ae5c74f96925082ac087bc2ae0865c3f0417e30388fdd5c93aa" exitCode=0
Sep 29 21:38:46 crc kubenswrapper[4911]: I0929 21:38:46.316157 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8zzr" event={"ID":"201b73b2-db5b-4496-81fa-7d3c87acbf2f","Type":"ContainerDied","Data":"5a7cd834ac5d7ae5c74f96925082ac087bc2ae0865c3f0417e30388fdd5c93aa"}
Sep 29 21:38:46 crc kubenswrapper[4911]: I0929 21:38:46.316177 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8zzr" event={"ID":"201b73b2-db5b-4496-81fa-7d3c87acbf2f","Type":"ContainerStarted","Data":"990d7967be4577255d98c68bc2759e1365ad716a5322e4f6c57d1f9788dda863"}
Sep 29 21:38:47 crc kubenswrapper[4911]: I0929 21:38:47.333921 4911 generic.go:334] "Generic (PLEG): container finished" podID="fba66f38-d6ca-4922-a098-30d733edfdc1" containerID="ae222908ea0f39ebd1e756b0af5dfe7df6a79fd97a69e680c66f52788d486085" exitCode=0
Sep 29 21:38:47 crc kubenswrapper[4911]: I0929 21:38:47.334719 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789" event={"ID":"fba66f38-d6ca-4922-a098-30d733edfdc1","Type":"ContainerDied","Data":"ae222908ea0f39ebd1e756b0af5dfe7df6a79fd97a69e680c66f52788d486085"}
Sep 29 21:38:48 crc kubenswrapper[4911]: I0929 21:38:48.344989 4911 generic.go:334] "Generic (PLEG): container finished" podID="fba66f38-d6ca-4922-a098-30d733edfdc1" containerID="507f40e709d151c27a050b48a016d6cda3229f7104ea5f325a58f2a8e69ca1de" exitCode=0
Sep 29 21:38:48 crc kubenswrapper[4911]: I0929 21:38:48.345039 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789" event={"ID":"fba66f38-d6ca-4922-a098-30d733edfdc1","Type":"ContainerDied","Data":"507f40e709d151c27a050b48a016d6cda3229f7104ea5f325a58f2a8e69ca1de"}
Sep 29 21:38:48 crc kubenswrapper[4911]: I0929 21:38:48.349213 4911 generic.go:334] "Generic (PLEG): container finished" podID="201b73b2-db5b-4496-81fa-7d3c87acbf2f" containerID="247bcc847ce9800cc1d6dfa8b02b4a5dcdc7c851cbe0bbec9f6ca8dbd3e29127" exitCode=0
Sep 29 21:38:48 crc kubenswrapper[4911]: I0929 21:38:48.349256 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8zzr" event={"ID":"201b73b2-db5b-4496-81fa-7d3c87acbf2f","Type":"ContainerDied","Data":"247bcc847ce9800cc1d6dfa8b02b4a5dcdc7c851cbe0bbec9f6ca8dbd3e29127"}
Sep 29 21:38:49 crc kubenswrapper[4911]: I0929 21:38:49.358052 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8zzr" event={"ID":"201b73b2-db5b-4496-81fa-7d3c87acbf2f","Type":"ContainerStarted","Data":"248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a"}
Sep 29 21:38:49 crc kubenswrapper[4911]: I0929 21:38:49.389949 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m8zzr" podStartSLOduration=1.931557027 podStartE2EDuration="4.389935239s" podCreationTimestamp="2025-09-29 21:38:45 +0000 UTC" firstStartedPulling="2025-09-29 21:38:46.317442589 +0000 UTC m=+804.294555260" lastFinishedPulling="2025-09-29 21:38:48.775820761 +0000 UTC m=+806.752933472" observedRunningTime="2025-09-29 21:38:49.388504064 +0000 UTC m=+807.365616735" watchObservedRunningTime="2025-09-29 21:38:49.389935239 +0000 UTC m=+807.367047910"
Sep 29 21:38:49 crc kubenswrapper[4911]: I0929 21:38:49.658896 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:49 crc kubenswrapper[4911]: I0929 21:38:49.792243 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fba66f38-d6ca-4922-a098-30d733edfdc1-bundle\") pod \"fba66f38-d6ca-4922-a098-30d733edfdc1\" (UID: \"fba66f38-d6ca-4922-a098-30d733edfdc1\") "
Sep 29 21:38:49 crc kubenswrapper[4911]: I0929 21:38:49.792759 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5sk6\" (UniqueName: \"kubernetes.io/projected/fba66f38-d6ca-4922-a098-30d733edfdc1-kube-api-access-m5sk6\") pod \"fba66f38-d6ca-4922-a098-30d733edfdc1\" (UID: \"fba66f38-d6ca-4922-a098-30d733edfdc1\") "
Sep 29 21:38:49 crc kubenswrapper[4911]: I0929 21:38:49.792851 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fba66f38-d6ca-4922-a098-30d733edfdc1-util\") pod \"fba66f38-d6ca-4922-a098-30d733edfdc1\" (UID: \"fba66f38-d6ca-4922-a098-30d733edfdc1\") "
Sep 29 21:38:49 crc kubenswrapper[4911]: I0929 21:38:49.795248 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fba66f38-d6ca-4922-a098-30d733edfdc1-bundle" (OuterVolumeSpecName: "bundle") pod "fba66f38-d6ca-4922-a098-30d733edfdc1" (UID: "fba66f38-d6ca-4922-a098-30d733edfdc1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:38:49 crc kubenswrapper[4911]: I0929 21:38:49.798715 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fba66f38-d6ca-4922-a098-30d733edfdc1-kube-api-access-m5sk6" (OuterVolumeSpecName: "kube-api-access-m5sk6") pod "fba66f38-d6ca-4922-a098-30d733edfdc1" (UID: "fba66f38-d6ca-4922-a098-30d733edfdc1"). InnerVolumeSpecName "kube-api-access-m5sk6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:38:49 crc kubenswrapper[4911]: I0929 21:38:49.825885 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fba66f38-d6ca-4922-a098-30d733edfdc1-util" (OuterVolumeSpecName: "util") pod "fba66f38-d6ca-4922-a098-30d733edfdc1" (UID: "fba66f38-d6ca-4922-a098-30d733edfdc1"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:38:49 crc kubenswrapper[4911]: I0929 21:38:49.894151 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5sk6\" (UniqueName: \"kubernetes.io/projected/fba66f38-d6ca-4922-a098-30d733edfdc1-kube-api-access-m5sk6\") on node \"crc\" DevicePath \"\""
Sep 29 21:38:49 crc kubenswrapper[4911]: I0929 21:38:49.894196 4911 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fba66f38-d6ca-4922-a098-30d733edfdc1-util\") on node \"crc\" DevicePath \"\""
Sep 29 21:38:49 crc kubenswrapper[4911]: I0929 21:38:49.894213 4911 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fba66f38-d6ca-4922-a098-30d733edfdc1-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 21:38:50 crc kubenswrapper[4911]: I0929 21:38:50.368215 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789"
Sep 29 21:38:50 crc kubenswrapper[4911]: I0929 21:38:50.368205 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789" event={"ID":"fba66f38-d6ca-4922-a098-30d733edfdc1","Type":"ContainerDied","Data":"fd3500024ad3fa1823946297453407bdc9484bd00c01544407f0e20a6d267aa5"}
Sep 29 21:38:50 crc kubenswrapper[4911]: I0929 21:38:50.368275 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd3500024ad3fa1823946297453407bdc9484bd00c01544407f0e20a6d267aa5"
Sep 29 21:38:53 crc kubenswrapper[4911]: I0929 21:38:53.790344 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx"]
Sep 29 21:38:53 crc kubenswrapper[4911]: E0929 21:38:53.790960 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fba66f38-d6ca-4922-a098-30d733edfdc1" containerName="pull"
Sep 29 21:38:53 crc kubenswrapper[4911]: I0929 21:38:53.790977 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fba66f38-d6ca-4922-a098-30d733edfdc1" containerName="pull"
Sep 29 21:38:53 crc kubenswrapper[4911]: E0929 21:38:53.790994 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fba66f38-d6ca-4922-a098-30d733edfdc1" containerName="util"
Sep 29 21:38:53 crc kubenswrapper[4911]: I0929 21:38:53.791001 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fba66f38-d6ca-4922-a098-30d733edfdc1" containerName="util"
Sep 29 21:38:53 crc kubenswrapper[4911]: E0929 21:38:53.791018 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fba66f38-d6ca-4922-a098-30d733edfdc1" containerName="extract"
Sep 29 21:38:53 crc kubenswrapper[4911]: I0929 21:38:53.791027 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fba66f38-d6ca-4922-a098-30d733edfdc1" containerName="extract"
Sep 29 21:38:53 crc kubenswrapper[4911]: I0929 21:38:53.791178 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="fba66f38-d6ca-4922-a098-30d733edfdc1" containerName="extract"
Sep 29 21:38:53 crc kubenswrapper[4911]: I0929 21:38:53.791942 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx"
Sep 29 21:38:53 crc kubenswrapper[4911]: I0929 21:38:53.794379 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-sbll5"
Sep 29 21:38:53 crc kubenswrapper[4911]: I0929 21:38:53.811831 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx"]
Sep 29 21:38:53 crc kubenswrapper[4911]: I0929 21:38:53.945740 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sxgc\" (UniqueName: \"kubernetes.io/projected/c1c21e4d-dd2a-440b-8f66-330667131f49-kube-api-access-6sxgc\") pod \"openstack-operator-controller-operator-55fbd568cd-t2lpx\" (UID: \"c1c21e4d-dd2a-440b-8f66-330667131f49\") " pod="openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx"
Sep 29 21:38:54 crc kubenswrapper[4911]: I0929 21:38:54.047299 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sxgc\" (UniqueName: \"kubernetes.io/projected/c1c21e4d-dd2a-440b-8f66-330667131f49-kube-api-access-6sxgc\") pod \"openstack-operator-controller-operator-55fbd568cd-t2lpx\" (UID: \"c1c21e4d-dd2a-440b-8f66-330667131f49\") " pod="openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx"
Sep 29 21:38:54 crc kubenswrapper[4911]: I0929 21:38:54.083865 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sxgc\" (UniqueName: \"kubernetes.io/projected/c1c21e4d-dd2a-440b-8f66-330667131f49-kube-api-access-6sxgc\") pod \"openstack-operator-controller-operator-55fbd568cd-t2lpx\" (UID: \"c1c21e4d-dd2a-440b-8f66-330667131f49\") " pod="openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx"
Sep 29 21:38:54 crc kubenswrapper[4911]: I0929 21:38:54.108514 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx"
Sep 29 21:38:54 crc kubenswrapper[4911]: I0929 21:38:54.554537 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx"]
Sep 29 21:38:55 crc kubenswrapper[4911]: I0929 21:38:55.414530 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx" event={"ID":"c1c21e4d-dd2a-440b-8f66-330667131f49","Type":"ContainerStarted","Data":"ccf65b89bf21773629dae50132cea70acdfeaefc11d24702a2bcf839e98f8df2"}
Sep 29 21:38:55 crc kubenswrapper[4911]: I0929 21:38:55.763977 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:55 crc kubenswrapper[4911]: I0929 21:38:55.764026 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:55 crc kubenswrapper[4911]: I0929 21:38:55.807155 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:56 crc kubenswrapper[4911]: I0929 21:38:56.471134 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:59 crc kubenswrapper[4911]: I0929 21:38:59.429166 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m8zzr"]
Sep 29 21:38:59 crc kubenswrapper[4911]: I0929 21:38:59.429914 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m8zzr" podUID="201b73b2-db5b-4496-81fa-7d3c87acbf2f" containerName="registry-server" containerID="cri-o://248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a" gracePeriod=2
Sep 29 21:38:59 crc kubenswrapper[4911]: I0929 21:38:59.446723 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx" event={"ID":"c1c21e4d-dd2a-440b-8f66-330667131f49","Type":"ContainerStarted","Data":"b0ba9caded234a6de60945ca6e486f07a0de8c5efc4e251cb56e7137ae853501"}
Sep 29 21:38:59 crc kubenswrapper[4911]: I0929 21:38:59.818282 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:38:59 crc kubenswrapper[4911]: I0929 21:38:59.941195 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gt8n\" (UniqueName: \"kubernetes.io/projected/201b73b2-db5b-4496-81fa-7d3c87acbf2f-kube-api-access-4gt8n\") pod \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\" (UID: \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\") "
Sep 29 21:38:59 crc kubenswrapper[4911]: I0929 21:38:59.942352 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201b73b2-db5b-4496-81fa-7d3c87acbf2f-utilities\") pod \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\" (UID: \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\") "
Sep 29 21:38:59 crc kubenswrapper[4911]: I0929 21:38:59.942518 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201b73b2-db5b-4496-81fa-7d3c87acbf2f-catalog-content\") pod \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\" (UID: \"201b73b2-db5b-4496-81fa-7d3c87acbf2f\") "
Sep 29 21:38:59 crc kubenswrapper[4911]: I0929 21:38:59.943221 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/201b73b2-db5b-4496-81fa-7d3c87acbf2f-utilities" (OuterVolumeSpecName: "utilities") pod "201b73b2-db5b-4496-81fa-7d3c87acbf2f" (UID: "201b73b2-db5b-4496-81fa-7d3c87acbf2f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:38:59 crc kubenswrapper[4911]: I0929 21:38:59.953000 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/201b73b2-db5b-4496-81fa-7d3c87acbf2f-kube-api-access-4gt8n" (OuterVolumeSpecName: "kube-api-access-4gt8n") pod "201b73b2-db5b-4496-81fa-7d3c87acbf2f" (UID: "201b73b2-db5b-4496-81fa-7d3c87acbf2f"). InnerVolumeSpecName "kube-api-access-4gt8n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.020629 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/201b73b2-db5b-4496-81fa-7d3c87acbf2f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "201b73b2-db5b-4496-81fa-7d3c87acbf2f" (UID: "201b73b2-db5b-4496-81fa-7d3c87acbf2f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.045399 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201b73b2-db5b-4496-81fa-7d3c87acbf2f-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.045431 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gt8n\" (UniqueName: \"kubernetes.io/projected/201b73b2-db5b-4496-81fa-7d3c87acbf2f-kube-api-access-4gt8n\") on node \"crc\" DevicePath \"\""
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.045478 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201b73b2-db5b-4496-81fa-7d3c87acbf2f-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.457374 4911 generic.go:334] "Generic (PLEG): container finished" podID="201b73b2-db5b-4496-81fa-7d3c87acbf2f" containerID="248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a" exitCode=0
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.457416 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8zzr" event={"ID":"201b73b2-db5b-4496-81fa-7d3c87acbf2f","Type":"ContainerDied","Data":"248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a"}
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.457427 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m8zzr"
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.457439 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m8zzr" event={"ID":"201b73b2-db5b-4496-81fa-7d3c87acbf2f","Type":"ContainerDied","Data":"990d7967be4577255d98c68bc2759e1365ad716a5322e4f6c57d1f9788dda863"}
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.457455 4911 scope.go:117] "RemoveContainer" containerID="248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a"
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.484115 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m8zzr"]
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.488920 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m8zzr"]
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.574020 4911 scope.go:117] "RemoveContainer" containerID="247bcc847ce9800cc1d6dfa8b02b4a5dcdc7c851cbe0bbec9f6ca8dbd3e29127"
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.688548 4911 scope.go:117] "RemoveContainer" containerID="5a7cd834ac5d7ae5c74f96925082ac087bc2ae0865c3f0417e30388fdd5c93aa"
Sep 29 21:39:00 crc kubenswrapper[4911]: I0929 21:39:00.722999 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="201b73b2-db5b-4496-81fa-7d3c87acbf2f" path="/var/lib/kubelet/pods/201b73b2-db5b-4496-81fa-7d3c87acbf2f/volumes"
Sep 29 21:39:01 crc kubenswrapper[4911]: I0929 21:39:01.159874 4911 scope.go:117] "RemoveContainer" containerID="248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a"
Sep 29 21:39:01 crc kubenswrapper[4911]: E0929 21:39:01.160500 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a\": container with ID starting with 248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a not found: ID does not exist" containerID="248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a"
Sep 29 21:39:01 crc kubenswrapper[4911]: I0929 21:39:01.160530 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a"} err="failed to get container status \"248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a\": rpc error: code = NotFound desc = could not find container \"248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a\": container with ID starting with 248db6b76dad60f394acf74bdac8808d1ae30fdc7c3945b9e6a07db5d65eb47a not found: ID does not exist"
Sep 29 21:39:01 crc kubenswrapper[4911]: I0929 21:39:01.160552 4911 scope.go:117] "RemoveContainer" containerID="247bcc847ce9800cc1d6dfa8b02b4a5dcdc7c851cbe0bbec9f6ca8dbd3e29127"
Sep 29 21:39:01 crc kubenswrapper[4911]: E0929 21:39:01.160931 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"247bcc847ce9800cc1d6dfa8b02b4a5dcdc7c851cbe0bbec9f6ca8dbd3e29127\": container with ID starting with 247bcc847ce9800cc1d6dfa8b02b4a5dcdc7c851cbe0bbec9f6ca8dbd3e29127 not found: ID does not exist" containerID="247bcc847ce9800cc1d6dfa8b02b4a5dcdc7c851cbe0bbec9f6ca8dbd3e29127"
Sep 29 21:39:01 crc kubenswrapper[4911]: I0929 21:39:01.160959 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"247bcc847ce9800cc1d6dfa8b02b4a5dcdc7c851cbe0bbec9f6ca8dbd3e29127"} err="failed to get container status \"247bcc847ce9800cc1d6dfa8b02b4a5dcdc7c851cbe0bbec9f6ca8dbd3e29127\": rpc error: code = NotFound desc = could not find container \"247bcc847ce9800cc1d6dfa8b02b4a5dcdc7c851cbe0bbec9f6ca8dbd3e29127\": container with ID starting with 247bcc847ce9800cc1d6dfa8b02b4a5dcdc7c851cbe0bbec9f6ca8dbd3e29127 not found: ID does not exist"
Sep 29 21:39:01 crc kubenswrapper[4911]: I0929 21:39:01.160974 4911 scope.go:117] "RemoveContainer" containerID="5a7cd834ac5d7ae5c74f96925082ac087bc2ae0865c3f0417e30388fdd5c93aa"
Sep 29 21:39:01 crc kubenswrapper[4911]: E0929 21:39:01.161327 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a7cd834ac5d7ae5c74f96925082ac087bc2ae0865c3f0417e30388fdd5c93aa\": container with ID starting with 5a7cd834ac5d7ae5c74f96925082ac087bc2ae0865c3f0417e30388fdd5c93aa not found: ID does not exist" containerID="5a7cd834ac5d7ae5c74f96925082ac087bc2ae0865c3f0417e30388fdd5c93aa"
Sep 29 21:39:01 crc kubenswrapper[4911]: I0929 21:39:01.161383 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a7cd834ac5d7ae5c74f96925082ac087bc2ae0865c3f0417e30388fdd5c93aa"} err="failed to get container status \"5a7cd834ac5d7ae5c74f96925082ac087bc2ae0865c3f0417e30388fdd5c93aa\": rpc error: code = NotFound desc = could not find container \"5a7cd834ac5d7ae5c74f96925082ac087bc2ae0865c3f0417e30388fdd5c93aa\": container with ID starting with 5a7cd834ac5d7ae5c74f96925082ac087bc2ae0865c3f0417e30388fdd5c93aa not found: ID does not exist"
Sep 29 21:39:01 crc kubenswrapper[4911]: I0929 21:39:01.467823 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx" event={"ID":"c1c21e4d-dd2a-440b-8f66-330667131f49","Type":"ContainerStarted","Data":"8deb9a034c5646f9a5c0c9a2f9f8aedac40cb780683d4f2733846f3c3bd9db6b"}
Sep 29 21:39:01 crc kubenswrapper[4911]: I0929 21:39:01.467961 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx"
Sep 29 21:39:01 crc kubenswrapper[4911]: I0929 21:39:01.499440 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx" podStartSLOduration=1.896939063 podStartE2EDuration="8.499423956s" podCreationTimestamp="2025-09-29 21:38:53 +0000 UTC" firstStartedPulling="2025-09-29 21:38:54.586992217 +0000 UTC m=+812.564104888" lastFinishedPulling="2025-09-29 21:39:01.18947711 +0000 UTC m=+819.166589781" observedRunningTime="2025-09-29 21:39:01.496854694 +0000 UTC m=+819.473967375" watchObservedRunningTime="2025-09-29 21:39:01.499423956 +0000 UTC m=+819.476536627"
Sep 29 21:39:04 crc kubenswrapper[4911]: I0929 21:39:04.113874 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-55fbd568cd-t2lpx"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.654245 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v"]
Sep 29 21:39:20 crc kubenswrapper[4911]: E0929 21:39:20.655033 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="201b73b2-db5b-4496-81fa-7d3c87acbf2f" containerName="registry-server"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.655050 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="201b73b2-db5b-4496-81fa-7d3c87acbf2f" containerName="registry-server"
Sep 29 21:39:20 crc kubenswrapper[4911]: E0929 21:39:20.655069 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="201b73b2-db5b-4496-81fa-7d3c87acbf2f" containerName="extract-content"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.655078 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="201b73b2-db5b-4496-81fa-7d3c87acbf2f" containerName="extract-content"
Sep 29 21:39:20 crc kubenswrapper[4911]: E0929 21:39:20.655097 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="201b73b2-db5b-4496-81fa-7d3c87acbf2f" containerName="extract-utilities"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.655106 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="201b73b2-db5b-4496-81fa-7d3c87acbf2f" containerName="extract-utilities"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.655242 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="201b73b2-db5b-4496-81fa-7d3c87acbf2f" containerName="registry-server"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.656047 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.658923 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-mk28k"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.659454 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j"]
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.660477 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.662687 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-cls6z"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.672006 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v"]
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.676296 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8"]
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.677275 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.682641 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-qdpzj"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.682916 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j"]
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.722762 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8"]
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.731411 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd"]
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.732460 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.732484 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5grcz\" (UniqueName: \"kubernetes.io/projected/a9ab461d-94d1-487f-9854-a5e7e80f88ed-kube-api-access-5grcz\") pod \"barbican-operator-controller-manager-6ff8b75857-82p2v\" (UID: \"a9ab461d-94d1-487f-9854-a5e7e80f88ed\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.737087 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-vj226"
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.743179 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9"]
Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.744326 4911 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.747058 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-jm77w" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.760076 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.771319 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.776323 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.777343 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.779568 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.779727 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-7mvd8" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.784390 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.785401 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.790638 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.791120 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-nr29b" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.793761 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.824759 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.825907 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.833202 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-r6wxx" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.833985 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl2mt\" (UniqueName: \"kubernetes.io/projected/7b9b9966-b82d-481d-9ca9-062a883ffd1c-kube-api-access-wl2mt\") pod \"glance-operator-controller-manager-84958c4d49-b8mfd\" (UID: \"7b9b9966-b82d-481d-9ca9-062a883ffd1c\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.834054 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqnlf\" (UniqueName: \"kubernetes.io/projected/10f051aa-3e26-4a9b-89a1-d5bd8e58ba16-kube-api-access-kqnlf\") pod \"heat-operator-controller-manager-5d889d78cf-ssgn9\" (UID: \"10f051aa-3e26-4a9b-89a1-d5bd8e58ba16\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.834127 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5grcz\" (UniqueName: \"kubernetes.io/projected/a9ab461d-94d1-487f-9854-a5e7e80f88ed-kube-api-access-5grcz\") pod \"barbican-operator-controller-manager-6ff8b75857-82p2v\" (UID: \"a9ab461d-94d1-487f-9854-a5e7e80f88ed\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.834151 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfszs\" (UniqueName: \"kubernetes.io/projected/97810e9c-ad62-45d0-a644-1362696f0087-kube-api-access-nfszs\") pod \"designate-operator-controller-manager-84f4f7b77b-hm6c8\" (UID: \"97810e9c-ad62-45d0-a644-1362696f0087\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.834209 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krbcz\" (UniqueName: \"kubernetes.io/projected/0b825f85-6069-4d69-b1a8-9404542556cb-kube-api-access-krbcz\") pod \"cinder-operator-controller-manager-644bddb6d8-96r7j\" (UID: \"0b825f85-6069-4d69-b1a8-9404542556cb\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.836606 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.839893 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.841954 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-6xsgp" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.850878 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.855819 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.876938 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.878155 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.880472 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.881642 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.881666 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-vzmcw" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.883365 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-cf9fw" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.886946 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.890588 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5grcz\" (UniqueName: \"kubernetes.io/projected/a9ab461d-94d1-487f-9854-a5e7e80f88ed-kube-api-access-5grcz\") pod \"barbican-operator-controller-manager-6ff8b75857-82p2v\" (UID: \"a9ab461d-94d1-487f-9854-a5e7e80f88ed\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.900096 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.906854 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.908339 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.912800 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.918187 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-rt8lf" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.935781 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krbcz\" (UniqueName: \"kubernetes.io/projected/0b825f85-6069-4d69-b1a8-9404542556cb-kube-api-access-krbcz\") pod \"cinder-operator-controller-manager-644bddb6d8-96r7j\" (UID: \"0b825f85-6069-4d69-b1a8-9404542556cb\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.935853 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlfkh\" (UniqueName: \"kubernetes.io/projected/3dff6b09-12d0-462d-8558-175673f2ee0e-kube-api-access-rlfkh\") pod \"infra-operator-controller-manager-7d857cc749-ps6zb\" (UID: \"3dff6b09-12d0-462d-8558-175673f2ee0e\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.935896 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bc9wg\" (UniqueName: \"kubernetes.io/projected/c30e2b8c-3f72-4e6f-a5e4-ca27d53f5194-kube-api-access-bc9wg\") pod \"ironic-operator-controller-manager-7975b88857-2rlqk\" (UID: \"c30e2b8c-3f72-4e6f-a5e4-ca27d53f5194\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.935915 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl2mt\" (UniqueName: \"kubernetes.io/projected/7b9b9966-b82d-481d-9ca9-062a883ffd1c-kube-api-access-wl2mt\") pod \"glance-operator-controller-manager-84958c4d49-b8mfd\" (UID: \"7b9b9966-b82d-481d-9ca9-062a883ffd1c\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.935938 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3dff6b09-12d0-462d-8558-175673f2ee0e-cert\") pod \"infra-operator-controller-manager-7d857cc749-ps6zb\" (UID: \"3dff6b09-12d0-462d-8558-175673f2ee0e\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.936268 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhfq2\" (UniqueName: \"kubernetes.io/projected/0ffadab1-e334-4ec5-8e50-2a589230e880-kube-api-access-mhfq2\") pod \"keystone-operator-controller-manager-5bd55b4bff-cnn7f\" (UID: \"0ffadab1-e334-4ec5-8e50-2a589230e880\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.936303 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqnlf\" (UniqueName: 
\"kubernetes.io/projected/10f051aa-3e26-4a9b-89a1-d5bd8e58ba16-kube-api-access-kqnlf\") pod \"heat-operator-controller-manager-5d889d78cf-ssgn9\" (UID: \"10f051aa-3e26-4a9b-89a1-d5bd8e58ba16\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.936352 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7lzc\" (UniqueName: \"kubernetes.io/projected/571a100e-c479-4a69-a8e3-8c7b1abe1bc5-kube-api-access-t7lzc\") pod \"horizon-operator-controller-manager-9f4696d94-fgfrh\" (UID: \"571a100e-c479-4a69-a8e3-8c7b1abe1bc5\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.936381 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfszs\" (UniqueName: \"kubernetes.io/projected/97810e9c-ad62-45d0-a644-1362696f0087-kube-api-access-nfszs\") pod \"designate-operator-controller-manager-84f4f7b77b-hm6c8\" (UID: \"97810e9c-ad62-45d0-a644-1362696f0087\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.944708 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.949309 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.954665 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.960811 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-6ktww" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.964230 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl2mt\" (UniqueName: \"kubernetes.io/projected/7b9b9966-b82d-481d-9ca9-062a883ffd1c-kube-api-access-wl2mt\") pod \"glance-operator-controller-manager-84958c4d49-b8mfd\" (UID: \"7b9b9966-b82d-481d-9ca9-062a883ffd1c\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.964290 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqnlf\" (UniqueName: \"kubernetes.io/projected/10f051aa-3e26-4a9b-89a1-d5bd8e58ba16-kube-api-access-kqnlf\") pod \"heat-operator-controller-manager-5d889d78cf-ssgn9\" (UID: \"10f051aa-3e26-4a9b-89a1-d5bd8e58ba16\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9" Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.968403 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw"] Sep 29 21:39:20 crc kubenswrapper[4911]: I0929 21:39:20.981302 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.006229 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krbcz\" (UniqueName: \"kubernetes.io/projected/0b825f85-6069-4d69-b1a8-9404542556cb-kube-api-access-krbcz\") pod \"cinder-operator-controller-manager-644bddb6d8-96r7j\" (UID: \"0b825f85-6069-4d69-b1a8-9404542556cb\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.006938 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfszs\" (UniqueName: \"kubernetes.io/projected/97810e9c-ad62-45d0-a644-1362696f0087-kube-api-access-nfszs\") pod \"designate-operator-controller-manager-84f4f7b77b-hm6c8\" (UID: \"97810e9c-ad62-45d0-a644-1362696f0087\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.015817 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.015930 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.025542 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-kmfnz" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.032666 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.033750 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.035767 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-lkbtk" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.037239 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5d5s\" (UniqueName: \"kubernetes.io/projected/73f9cfcd-2c46-4070-9734-4ec07c824a9f-kube-api-access-m5d5s\") pod \"manila-operator-controller-manager-6d68dbc695-l6zpg\" (UID: \"73f9cfcd-2c46-4070-9734-4ec07c824a9f\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.037294 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7lzc\" (UniqueName: \"kubernetes.io/projected/571a100e-c479-4a69-a8e3-8c7b1abe1bc5-kube-api-access-t7lzc\") pod \"horizon-operator-controller-manager-9f4696d94-fgfrh\" (UID: \"571a100e-c479-4a69-a8e3-8c7b1abe1bc5\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.037319 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9prh7\" (UniqueName: \"kubernetes.io/projected/66254365-9a3e-4101-beff-6bcbdfe57222-kube-api-access-9prh7\") pod \"mariadb-operator-controller-manager-88c7-zrp9j\" (UID: \"66254365-9a3e-4101-beff-6bcbdfe57222\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.037351 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlfkh\" (UniqueName: \"kubernetes.io/projected/3dff6b09-12d0-462d-8558-175673f2ee0e-kube-api-access-rlfkh\") pod \"infra-operator-controller-manager-7d857cc749-ps6zb\" (UID: \"3dff6b09-12d0-462d-8558-175673f2ee0e\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.037370 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8hxn\" (UniqueName: \"kubernetes.io/projected/09452e7c-7e3f-4ca5-ae5c-c321e2a581ee-kube-api-access-m8hxn\") pod \"nova-operator-controller-manager-c7c776c96-vdx55\" (UID: \"09452e7c-7e3f-4ca5-ae5c-c321e2a581ee\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.037399 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bc9wg\" (UniqueName: \"kubernetes.io/projected/c30e2b8c-3f72-4e6f-a5e4-ca27d53f5194-kube-api-access-bc9wg\") pod \"ironic-operator-controller-manager-7975b88857-2rlqk\" (UID: \"c30e2b8c-3f72-4e6f-a5e4-ca27d53f5194\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.037420 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3dff6b09-12d0-462d-8558-175673f2ee0e-cert\") pod \"infra-operator-controller-manager-7d857cc749-ps6zb\" (UID: \"3dff6b09-12d0-462d-8558-175673f2ee0e\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" Sep 29 21:39:21 crc kubenswrapper[4911]: 
I0929 21:39:21.037450 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhfq2\" (UniqueName: \"kubernetes.io/projected/0ffadab1-e334-4ec5-8e50-2a589230e880-kube-api-access-mhfq2\") pod \"keystone-operator-controller-manager-5bd55b4bff-cnn7f\" (UID: \"0ffadab1-e334-4ec5-8e50-2a589230e880\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.037467 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhppj\" (UniqueName: \"kubernetes.io/projected/89b4d71d-9da4-43af-a8a9-54c89c771c22-kube-api-access-dhppj\") pod \"neutron-operator-controller-manager-64d7b59854-dj2hs\" (UID: \"89b4d71d-9da4-43af-a8a9-54c89c771c22\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.044443 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.045528 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.045610 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.047187 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.048965 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-pdjpm" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.051114 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.052190 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.054099 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.054690 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.056365 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3dff6b09-12d0-462d-8558-175673f2ee0e-cert\") pod \"infra-operator-controller-manager-7d857cc749-ps6zb\" (UID: \"3dff6b09-12d0-462d-8558-175673f2ee0e\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.057560 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-8phxq" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.070011 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.076025 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhfq2\" (UniqueName: \"kubernetes.io/projected/0ffadab1-e334-4ec5-8e50-2a589230e880-kube-api-access-mhfq2\") pod \"keystone-operator-controller-manager-5bd55b4bff-cnn7f\" (UID: \"0ffadab1-e334-4ec5-8e50-2a589230e880\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.080157 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlfkh\" (UniqueName: \"kubernetes.io/projected/3dff6b09-12d0-462d-8558-175673f2ee0e-kube-api-access-rlfkh\") pod \"infra-operator-controller-manager-7d857cc749-ps6zb\" (UID: \"3dff6b09-12d0-462d-8558-175673f2ee0e\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.080272 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bc9wg\" (UniqueName: \"kubernetes.io/projected/c30e2b8c-3f72-4e6f-a5e4-ca27d53f5194-kube-api-access-bc9wg\") pod \"ironic-operator-controller-manager-7975b88857-2rlqk\" (UID: \"c30e2b8c-3f72-4e6f-a5e4-ca27d53f5194\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.081454 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7lzc\" (UniqueName: \"kubernetes.io/projected/571a100e-c479-4a69-a8e3-8c7b1abe1bc5-kube-api-access-t7lzc\") pod \"horizon-operator-controller-manager-9f4696d94-fgfrh\" (UID: \"571a100e-c479-4a69-a8e3-8c7b1abe1bc5\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.096071 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.096717 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.106177 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.108105 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.109606 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.113339 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-xw2wh" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.140860 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z87dn\" (UniqueName: \"kubernetes.io/projected/a9b7bc05-a0dc-421b-9e13-b00f3b8759f2-kube-api-access-z87dn\") pod \"ovn-operator-controller-manager-9976ff44c-dbm6k\" (UID: \"a9b7bc05-a0dc-421b-9e13-b00f3b8759f2\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.140915 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8hxn\" (UniqueName: \"kubernetes.io/projected/09452e7c-7e3f-4ca5-ae5c-c321e2a581ee-kube-api-access-m8hxn\") pod \"nova-operator-controller-manager-c7c776c96-vdx55\" (UID: \"09452e7c-7e3f-4ca5-ae5c-c321e2a581ee\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.140954 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6607b81-fac2-4fb0-a19c-a4b01eef9fd2-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-j9x29\" (UID: \"e6607b81-fac2-4fb0-a19c-a4b01eef9fd2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.140987 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhppj\" (UniqueName: \"kubernetes.io/projected/89b4d71d-9da4-43af-a8a9-54c89c771c22-kube-api-access-dhppj\") pod \"neutron-operator-controller-manager-64d7b59854-dj2hs\" (UID: \"89b4d71d-9da4-43af-a8a9-54c89c771c22\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.141007 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w42cl\" (UniqueName: \"kubernetes.io/projected/9049bfc1-ea91-4483-9f36-3ffaa5f250c7-kube-api-access-w42cl\") pod \"placement-operator-controller-manager-589c58c6c-zh4ln\" (UID: \"9049bfc1-ea91-4483-9f36-3ffaa5f250c7\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.141033 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnjx5\" (UniqueName: \"kubernetes.io/projected/afd159c8-b67d-46c6-8417-16f505314359-kube-api-access-dnjx5\") pod \"octavia-operator-controller-manager-76fcc6dc7c-lvplw\" (UID: \"afd159c8-b67d-46c6-8417-16f505314359\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.141052 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5d5s\" (UniqueName: \"kubernetes.io/projected/73f9cfcd-2c46-4070-9734-4ec07c824a9f-kube-api-access-m5d5s\") pod \"manila-operator-controller-manager-6d68dbc695-l6zpg\" (UID: \"73f9cfcd-2c46-4070-9734-4ec07c824a9f\") " 
pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.141079 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctdpk\" (UniqueName: \"kubernetes.io/projected/f86ad598-06ae-4ff1-90fc-1770d3b9797c-kube-api-access-ctdpk\") pod \"swift-operator-controller-manager-bc7dc7bd9-k6296\" (UID: \"f86ad598-06ae-4ff1-90fc-1770d3b9797c\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.141099 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdm7m\" (UniqueName: \"kubernetes.io/projected/e6607b81-fac2-4fb0-a19c-a4b01eef9fd2-kube-api-access-jdm7m\") pod \"openstack-baremetal-operator-controller-manager-6d776955-j9x29\" (UID: \"e6607b81-fac2-4fb0-a19c-a4b01eef9fd2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.141130 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9prh7\" (UniqueName: \"kubernetes.io/projected/66254365-9a3e-4101-beff-6bcbdfe57222-kube-api-access-9prh7\") pod \"mariadb-operator-controller-manager-88c7-zrp9j\" (UID: \"66254365-9a3e-4101-beff-6bcbdfe57222\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.150245 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.158900 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.170645 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.172465 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.175389 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8hxn\" (UniqueName: \"kubernetes.io/projected/09452e7c-7e3f-4ca5-ae5c-c321e2a581ee-kube-api-access-m8hxn\") pod \"nova-operator-controller-manager-c7c776c96-vdx55\" (UID: \"09452e7c-7e3f-4ca5-ae5c-c321e2a581ee\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.176911 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-wfcnr" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.177419 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5d5s\" (UniqueName: \"kubernetes.io/projected/73f9cfcd-2c46-4070-9734-4ec07c824a9f-kube-api-access-m5d5s\") pod \"manila-operator-controller-manager-6d68dbc695-l6zpg\" (UID: \"73f9cfcd-2c46-4070-9734-4ec07c824a9f\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.177987 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhppj\" (UniqueName: \"kubernetes.io/projected/89b4d71d-9da4-43af-a8a9-54c89c771c22-kube-api-access-dhppj\") pod \"neutron-operator-controller-manager-64d7b59854-dj2hs\" (UID: \"89b4d71d-9da4-43af-a8a9-54c89c771c22\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.180984 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.185918 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.186851 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9prh7\" (UniqueName: \"kubernetes.io/projected/66254365-9a3e-4101-beff-6bcbdfe57222-kube-api-access-9prh7\") pod \"mariadb-operator-controller-manager-88c7-zrp9j\" (UID: \"66254365-9a3e-4101-beff-6bcbdfe57222\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.212706 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.215571 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.224202 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-l55kw" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.224906 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.235088 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.240722 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.242494 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z87dn\" (UniqueName: \"kubernetes.io/projected/a9b7bc05-a0dc-421b-9e13-b00f3b8759f2-kube-api-access-z87dn\") pod \"ovn-operator-controller-manager-9976ff44c-dbm6k\" (UID: \"a9b7bc05-a0dc-421b-9e13-b00f3b8759f2\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.242567 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5tx8\" (UniqueName: \"kubernetes.io/projected/95ec5351-b5ce-417e-ba1c-dcd76592fa6b-kube-api-access-b5tx8\") pod \"test-operator-controller-manager-f66b554c6-d6xcc\" (UID: \"95ec5351-b5ce-417e-ba1c-dcd76592fa6b\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.242596 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6607b81-fac2-4fb0-a19c-a4b01eef9fd2-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-j9x29\" (UID: \"e6607b81-fac2-4fb0-a19c-a4b01eef9fd2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.242628 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w42cl\" (UniqueName: \"kubernetes.io/projected/9049bfc1-ea91-4483-9f36-3ffaa5f250c7-kube-api-access-w42cl\") pod \"placement-operator-controller-manager-589c58c6c-zh4ln\" (UID: \"9049bfc1-ea91-4483-9f36-3ffaa5f250c7\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.242656 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnjx5\" (UniqueName: \"kubernetes.io/projected/afd159c8-b67d-46c6-8417-16f505314359-kube-api-access-dnjx5\") pod \"octavia-operator-controller-manager-76fcc6dc7c-lvplw\" (UID: \"afd159c8-b67d-46c6-8417-16f505314359\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.242676 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctdpk\" (UniqueName: \"kubernetes.io/projected/f86ad598-06ae-4ff1-90fc-1770d3b9797c-kube-api-access-ctdpk\") pod \"swift-operator-controller-manager-bc7dc7bd9-k6296\" (UID: \"f86ad598-06ae-4ff1-90fc-1770d3b9797c\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.242696 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdm7m\" (UniqueName: \"kubernetes.io/projected/e6607b81-fac2-4fb0-a19c-a4b01eef9fd2-kube-api-access-jdm7m\") pod 
\"openstack-baremetal-operator-controller-manager-6d776955-j9x29\" (UID: \"e6607b81-fac2-4fb0-a19c-a4b01eef9fd2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.242714 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwh8j\" (UniqueName: \"kubernetes.io/projected/e6468afc-47e4-4281-9049-9209a4eb8d73-kube-api-access-gwh8j\") pod \"telemetry-operator-controller-manager-66c64d68d6-mn744\" (UID: \"e6468afc-47e4-4281-9049-9209a4eb8d73\") " pod="openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744" Sep 29 21:39:21 crc kubenswrapper[4911]: E0929 21:39:21.243080 4911 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 21:39:21 crc kubenswrapper[4911]: E0929 21:39:21.243143 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6607b81-fac2-4fb0-a19c-a4b01eef9fd2-cert podName:e6607b81-fac2-4fb0-a19c-a4b01eef9fd2 nodeName:}" failed. No retries permitted until 2025-09-29 21:39:21.743126288 +0000 UTC m=+839.720238959 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e6607b81-fac2-4fb0-a19c-a4b01eef9fd2-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-j9x29" (UID: "e6607b81-fac2-4fb0-a19c-a4b01eef9fd2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.255810 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.267774 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.269365 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctdpk\" (UniqueName: \"kubernetes.io/projected/f86ad598-06ae-4ff1-90fc-1770d3b9797c-kube-api-access-ctdpk\") pod \"swift-operator-controller-manager-bc7dc7bd9-k6296\" (UID: \"f86ad598-06ae-4ff1-90fc-1770d3b9797c\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.269408 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.274817 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-xkkpb" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.278513 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.283710 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w42cl\" (UniqueName: \"kubernetes.io/projected/9049bfc1-ea91-4483-9f36-3ffaa5f250c7-kube-api-access-w42cl\") pod \"placement-operator-controller-manager-589c58c6c-zh4ln\" (UID: \"9049bfc1-ea91-4483-9f36-3ffaa5f250c7\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.285683 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z87dn\" (UniqueName: \"kubernetes.io/projected/a9b7bc05-a0dc-421b-9e13-b00f3b8759f2-kube-api-access-z87dn\") pod \"ovn-operator-controller-manager-9976ff44c-dbm6k\" (UID: \"a9b7bc05-a0dc-421b-9e13-b00f3b8759f2\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.286897 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdm7m\" (UniqueName: \"kubernetes.io/projected/e6607b81-fac2-4fb0-a19c-a4b01eef9fd2-kube-api-access-jdm7m\") pod \"openstack-baremetal-operator-controller-manager-6d776955-j9x29\" (UID: \"e6607b81-fac2-4fb0-a19c-a4b01eef9fd2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.293963 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.295127 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnjx5\" (UniqueName: \"kubernetes.io/projected/afd159c8-b67d-46c6-8417-16f505314359-kube-api-access-dnjx5\") pod \"octavia-operator-controller-manager-76fcc6dc7c-lvplw\" (UID: \"afd159c8-b67d-46c6-8417-16f505314359\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.314004 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.344071 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnm7f\" (UniqueName: \"kubernetes.io/projected/4365babc-aa2d-4609-880b-f036dc6c367b-kube-api-access-xnm7f\") pod \"watcher-operator-controller-manager-76669f99c-2gcq4\" (UID: \"4365babc-aa2d-4609-880b-f036dc6c367b\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.344149 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwh8j\" (UniqueName: \"kubernetes.io/projected/e6468afc-47e4-4281-9049-9209a4eb8d73-kube-api-access-gwh8j\") pod \"telemetry-operator-controller-manager-66c64d68d6-mn744\" (UID: \"e6468afc-47e4-4281-9049-9209a4eb8d73\") " pod="openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.344232 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5tx8\" (UniqueName: \"kubernetes.io/projected/95ec5351-b5ce-417e-ba1c-dcd76592fa6b-kube-api-access-b5tx8\") pod \"test-operator-controller-manager-f66b554c6-d6xcc\" (UID: \"95ec5351-b5ce-417e-ba1c-dcd76592fa6b\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.361547 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.388073 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.388318 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwh8j\" (UniqueName: \"kubernetes.io/projected/e6468afc-47e4-4281-9049-9209a4eb8d73-kube-api-access-gwh8j\") pod \"telemetry-operator-controller-manager-66c64d68d6-mn744\" (UID: \"e6468afc-47e4-4281-9049-9209a4eb8d73\") " pod="openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.388532 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5tx8\" (UniqueName: \"kubernetes.io/projected/95ec5351-b5ce-417e-ba1c-dcd76592fa6b-kube-api-access-b5tx8\") pod \"test-operator-controller-manager-f66b554c6-d6xcc\" (UID: \"95ec5351-b5ce-417e-ba1c-dcd76592fa6b\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.391339 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.392325 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.396006 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.397749 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-fmkl4" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.402164 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.413578 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.424100 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.438601 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.443706 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.444482 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.445150 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnm7f\" (UniqueName: \"kubernetes.io/projected/4365babc-aa2d-4609-880b-f036dc6c367b-kube-api-access-xnm7f\") pod \"watcher-operator-controller-manager-76669f99c-2gcq4\" (UID: \"4365babc-aa2d-4609-880b-f036dc6c367b\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.445197 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2eba2279-aec7-4ad5-83d7-2ffd190b17e6-cert\") pod \"openstack-operator-controller-manager-7f894b9c96-4p2kz\" (UID: \"2eba2279-aec7-4ad5-83d7-2ffd190b17e6\") " pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.445237 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lf2m\" (UniqueName: \"kubernetes.io/projected/2eba2279-aec7-4ad5-83d7-2ffd190b17e6-kube-api-access-5lf2m\") pod \"openstack-operator-controller-manager-7f894b9c96-4p2kz\" (UID: \"2eba2279-aec7-4ad5-83d7-2ffd190b17e6\") " pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.465302 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-pthrp" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.498960 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnm7f\" (UniqueName: 
\"kubernetes.io/projected/4365babc-aa2d-4609-880b-f036dc6c367b-kube-api-access-xnm7f\") pod \"watcher-operator-controller-manager-76669f99c-2gcq4\" (UID: \"4365babc-aa2d-4609-880b-f036dc6c367b\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.499294 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq"] Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.507134 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.546181 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lf2m\" (UniqueName: \"kubernetes.io/projected/2eba2279-aec7-4ad5-83d7-2ffd190b17e6-kube-api-access-5lf2m\") pod \"openstack-operator-controller-manager-7f894b9c96-4p2kz\" (UID: \"2eba2279-aec7-4ad5-83d7-2ffd190b17e6\") " pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.546231 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpxrk\" (UniqueName: \"kubernetes.io/projected/96036e4c-a554-4d6c-8cd8-ef098c91f3a5-kube-api-access-wpxrk\") pod \"rabbitmq-cluster-operator-manager-79d8469568-nh8dq\" (UID: \"96036e4c-a554-4d6c-8cd8-ef098c91f3a5\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.546345 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2eba2279-aec7-4ad5-83d7-2ffd190b17e6-cert\") pod \"openstack-operator-controller-manager-7f894b9c96-4p2kz\" (UID: \"2eba2279-aec7-4ad5-83d7-2ffd190b17e6\") " pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" Sep 29 21:39:21 crc kubenswrapper[4911]: E0929 21:39:21.546457 4911 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 29 21:39:21 crc kubenswrapper[4911]: E0929 21:39:21.546513 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2eba2279-aec7-4ad5-83d7-2ffd190b17e6-cert podName:2eba2279-aec7-4ad5-83d7-2ffd190b17e6 nodeName:}" failed. No retries permitted until 2025-09-29 21:39:22.046497786 +0000 UTC m=+840.023610457 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2eba2279-aec7-4ad5-83d7-2ffd190b17e6-cert") pod "openstack-operator-controller-manager-7f894b9c96-4p2kz" (UID: "2eba2279-aec7-4ad5-83d7-2ffd190b17e6") : secret "webhook-server-cert" not found Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.569623 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lf2m\" (UniqueName: \"kubernetes.io/projected/2eba2279-aec7-4ad5-83d7-2ffd190b17e6-kube-api-access-5lf2m\") pod \"openstack-operator-controller-manager-7f894b9c96-4p2kz\" (UID: \"2eba2279-aec7-4ad5-83d7-2ffd190b17e6\") " pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.613626 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.625719 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.646943 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpxrk\" (UniqueName: \"kubernetes.io/projected/96036e4c-a554-4d6c-8cd8-ef098c91f3a5-kube-api-access-wpxrk\") pod \"rabbitmq-cluster-operator-manager-79d8469568-nh8dq\" (UID: \"96036e4c-a554-4d6c-8cd8-ef098c91f3a5\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.676330 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpxrk\" (UniqueName: \"kubernetes.io/projected/96036e4c-a554-4d6c-8cd8-ef098c91f3a5-kube-api-access-wpxrk\") pod \"rabbitmq-cluster-operator-manager-79d8469568-nh8dq\" (UID: \"96036e4c-a554-4d6c-8cd8-ef098c91f3a5\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.748518 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6607b81-fac2-4fb0-a19c-a4b01eef9fd2-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-j9x29\" (UID: \"e6607b81-fac2-4fb0-a19c-a4b01eef9fd2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.765132 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6607b81-fac2-4fb0-a19c-a4b01eef9fd2-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-j9x29\" (UID: \"e6607b81-fac2-4fb0-a19c-a4b01eef9fd2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" Sep 29 21:39:21 crc kubenswrapper[4911]: I0929 21:39:21.897285 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq" Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.009768 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.052077 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2eba2279-aec7-4ad5-83d7-2ffd190b17e6-cert\") pod \"openstack-operator-controller-manager-7f894b9c96-4p2kz\" (UID: \"2eba2279-aec7-4ad5-83d7-2ffd190b17e6\") " pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" Sep 29 21:39:22 crc kubenswrapper[4911]: E0929 21:39:22.052279 4911 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 29 21:39:22 crc kubenswrapper[4911]: E0929 21:39:22.052336 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2eba2279-aec7-4ad5-83d7-2ffd190b17e6-cert podName:2eba2279-aec7-4ad5-83d7-2ffd190b17e6 nodeName:}" failed. No retries permitted until 2025-09-29 21:39:23.052319733 +0000 UTC m=+841.029432394 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2eba2279-aec7-4ad5-83d7-2ffd190b17e6-cert") pod "openstack-operator-controller-manager-7f894b9c96-4p2kz" (UID: "2eba2279-aec7-4ad5-83d7-2ffd190b17e6") : secret "webhook-server-cert" not found Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.452970 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9"] Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.483118 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v"] Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.483945 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.623679 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9" event={"ID":"10f051aa-3e26-4a9b-89a1-d5bd8e58ba16","Type":"ContainerStarted","Data":"5ed1ae2e181a753fadc71d610a5651f7cad0cf248e0d5fdce559d5727bd8d149"} Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.624531 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v" event={"ID":"a9ab461d-94d1-487f-9854-a5e7e80f88ed","Type":"ContainerStarted","Data":"58461ed17c1e96a2c8f41e2a30cfe9b374da423e3b2a80109f78bf896e0a63c8"} Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.639191 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd"] Sep 29 21:39:22 crc kubenswrapper[4911]: W0929 21:39:22.641854 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b9b9966_b82d_481d_9ca9_062a883ffd1c.slice/crio-d248406422b631426a13cc4c9fd8e7a98e2f49b0b35b0aef8d9791958c457391 WatchSource:0}: Error finding container d248406422b631426a13cc4c9fd8e7a98e2f49b0b35b0aef8d9791958c457391: Status 404 returned error can't find the container with id d248406422b631426a13cc4c9fd8e7a98e2f49b0b35b0aef8d9791958c457391 Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.918828 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j"] Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.927364 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln"] Sep 29 21:39:22 crc kubenswrapper[4911]: W0929 21:39:22.941191 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9049bfc1_ea91_4483_9f36_3ffaa5f250c7.slice/crio-48b86ce018079770e1ca706209bcdcf642c52243a50f38a04e9481a15fcfdc40 WatchSource:0}: Error finding container 48b86ce018079770e1ca706209bcdcf642c52243a50f38a04e9481a15fcfdc40: Status 404 returned error can't find the container with id 48b86ce018079770e1ca706209bcdcf642c52243a50f38a04e9481a15fcfdc40 Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.943955 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw"] Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.975166 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg"] Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.980949 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk"] Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.985940 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744"] Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.990034 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f"] Sep 29 21:39:22 crc kubenswrapper[4911]: I0929 21:39:22.994688 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh"] Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.007781 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb"] Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.029457 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55"] Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.055034 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:f5f0d2eb534f763cf6578af513add1c21c1659b2cd75214dfddfedb9eebf6397,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-t7lzc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-9f4696d94-fgfrh_openstack-operators(571a100e-c479-4a69-a8e3-8c7b1abe1bc5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.071382 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2eba2279-aec7-4ad5-83d7-2ffd190b17e6-cert\") pod \"openstack-operator-controller-manager-7f894b9c96-4p2kz\" (UID: \"2eba2279-aec7-4ad5-83d7-2ffd190b17e6\") " pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.077726 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2eba2279-aec7-4ad5-83d7-2ffd190b17e6-cert\") pod \"openstack-operator-controller-manager-7f894b9c96-4p2kz\" (UID: \"2eba2279-aec7-4ad5-83d7-2ffd190b17e6\") " pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.201617 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" podUID="571a100e-c479-4a69-a8e3-8c7b1abe1bc5" Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.257461 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-fmkl4" Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.273486 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.319100 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29"] Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.325880 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs"] Sep 29 21:39:23 crc kubenswrapper[4911]: W0929 21:39:23.345746 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4365babc_aa2d_4609_880b_f036dc6c367b.slice/crio-46da661e9fc4e0cb09bce6182eed2ef1ea775305792cf4d7bdad5a8f507e3e42 WatchSource:0}: Error finding container 46da661e9fc4e0cb09bce6182eed2ef1ea775305792cf4d7bdad5a8f507e3e42: Status 404 returned error can't find the container with id 46da661e9fc4e0cb09bce6182eed2ef1ea775305792cf4d7bdad5a8f507e3e42 Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.345998 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k"] Sep 29 21:39:23 crc kubenswrapper[4911]: W0929 21:39:23.349061 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9b7bc05_a0dc_421b_9e13_b00f3b8759f2.slice/crio-89628d137a30e4289d3a225eb56acb7dcc42dedf87c070e7b7b07ac7b59d7356 WatchSource:0}: Error finding container 89628d137a30e4289d3a225eb56acb7dcc42dedf87c070e7b7b07ac7b59d7356: Status 404 returned error can't find the container with id 89628d137a30e4289d3a225eb56acb7dcc42dedf87c070e7b7b07ac7b59d7356 Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.350119 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4"] Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.353833 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:485df5c7813cdf4cf21f48ec48c8e3e4962fee6a1ae4c64f7af127d5ab346a10,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dhppj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-64d7b59854-dj2hs_openstack-operators(89b4d71d-9da4-43af-a8a9-54c89c771c22): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.354612 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j"] Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.359545 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8"] Sep 29 21:39:23 crc kubenswrapper[4911]: W0929 21:39:23.360358 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96036e4c_a554_4d6c_8cd8_ef098c91f3a5.slice/crio-0ee9c960bb2794ef3b98b1416e43aa1b02982a6d075c5882ed3a540789602103 WatchSource:0}: Error finding container 0ee9c960bb2794ef3b98b1416e43aa1b02982a6d075c5882ed3a540789602103: Status 404 returned error can't find the container with id 0ee9c960bb2794ef3b98b1416e43aa1b02982a6d075c5882ed3a540789602103 Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.361817 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z87dn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-9976ff44c-dbm6k_openstack-operators(a9b7bc05-a0dc-421b-9e13-b00f3b8759f2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 21:39:23 crc kubenswrapper[4911]: W0929 21:39:23.362198 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97810e9c_ad62_45d0_a644_1362696f0087.slice/crio-11bbc70c28795af90ad9ead9a6437b62c19769b25e33fa1ecd28e00231d4bc49 WatchSource:0}: Error finding container 11bbc70c28795af90ad9ead9a6437b62c19769b25e33fa1ecd28e00231d4bc49: Status 404 returned error can't find the container with id 11bbc70c28795af90ad9ead9a6437b62c19769b25e33fa1ecd28e00231d4bc49 Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.362536 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wpxrk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
rabbitmq-cluster-operator-manager-79d8469568-nh8dq_openstack-operators(96036e4c-a554-4d6c-8cd8-ef098c91f3a5): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.363392 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc"]
Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.363894 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq" podUID="96036e4c-a554-4d6c-8cd8-ef098c91f3a5"
Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.364472 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nfszs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-84f4f7b77b-hm6c8_openstack-operators(97810e9c-ad62-45d0-a644-1362696f0087): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.368634 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq"]
Sep 29 21:39:23 crc kubenswrapper[4911]: W0929 21:39:23.371891 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66254365_9a3e_4101_beff_6bcbdfe57222.slice/crio-d51cbc9fb8b5fe306f1dcdf0e89d8b1879332cbaa0307bdf39e53a7013eb483c WatchSource:0}: Error finding container d51cbc9fb8b5fe306f1dcdf0e89d8b1879332cbaa0307bdf39e53a7013eb483c: Status 404 returned error can't find the container with id d51cbc9fb8b5fe306f1dcdf0e89d8b1879332cbaa0307bdf39e53a7013eb483c
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.374970 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296"]
Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.375782 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:15d7b5a365350a831ca59d984df67fadeccf89d599e487a7597b105afb82ce4a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9prh7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-88c7-zrp9j_openstack-operators(66254365-9a3e-4101-beff-6bcbdfe57222): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.382601 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b5tx8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-f66b554c6-d6xcc_openstack-operators(95ec5351-b5ce-417e-ba1c-dcd76592fa6b): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.384126 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ctdpk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-bc7dc7bd9-k6296_openstack-operators(f86ad598-06ae-4ff1-90fc-1770d3b9797c): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.654622 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" event={"ID":"66254365-9a3e-4101-beff-6bcbdfe57222","Type":"ContainerStarted","Data":"d51cbc9fb8b5fe306f1dcdf0e89d8b1879332cbaa0307bdf39e53a7013eb483c"}
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.742725 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" event={"ID":"571a100e-c479-4a69-a8e3-8c7b1abe1bc5","Type":"ContainerStarted","Data":"f4d8e9c7a020c427c2c139a55e74b604173e9088f403c789ec57814543612382"}
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.742776 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" event={"ID":"571a100e-c479-4a69-a8e3-8c7b1abe1bc5","Type":"ContainerStarted","Data":"5ab832866f7fff8dfc57e09507a43962c6465a908c90492dedd4b4fe6e1b983b"}
Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.749003 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" podUID="97810e9c-ad62-45d0-a644-1362696f0087"
Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.749289 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" podUID="66254365-9a3e-4101-beff-6bcbdfe57222"
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.765483 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk" event={"ID":"c30e2b8c-3f72-4e6f-a5e4-ca27d53f5194","Type":"ContainerStarted","Data":"1559fee890aba64e7d85cbcff12cb767183adb777c9089007a8e42b40ea3d4ad"}
Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.774296 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:f5f0d2eb534f763cf6578af513add1c21c1659b2cd75214dfddfedb9eebf6397\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" podUID="571a100e-c479-4a69-a8e3-8c7b1abe1bc5"
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.792278 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" event={"ID":"a9b7bc05-a0dc-421b-9e13-b00f3b8759f2","Type":"ContainerStarted","Data":"89628d137a30e4289d3a225eb56acb7dcc42dedf87c070e7b7b07ac7b59d7356"}
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.806981 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd" event={"ID":"7b9b9966-b82d-481d-9ca9-062a883ffd1c","Type":"ContainerStarted","Data":"d248406422b631426a13cc4c9fd8e7a98e2f49b0b35b0aef8d9791958c457391"}
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.859054 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" event={"ID":"89b4d71d-9da4-43af-a8a9-54c89c771c22","Type":"ContainerStarted","Data":"e8b540c5306578b6494c1d65273434b690747d73e990b93832f3ba237440f4ef"}
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.917940 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" event={"ID":"97810e9c-ad62-45d0-a644-1362696f0087","Type":"ContainerStarted","Data":"11bbc70c28795af90ad9ead9a6437b62c19769b25e33fa1ecd28e00231d4bc49"}
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.921536 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz"]
Sep 29 21:39:23 crc kubenswrapper[4911]: E0929 21:39:23.933612 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2\\\"\"" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" podUID="97810e9c-ad62-45d0-a644-1362696f0087"
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.936770 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln" event={"ID":"9049bfc1-ea91-4483-9f36-3ffaa5f250c7","Type":"ContainerStarted","Data":"48b86ce018079770e1ca706209bcdcf642c52243a50f38a04e9481a15fcfdc40"}
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.940206 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" event={"ID":"e6607b81-fac2-4fb0-a19c-a4b01eef9fd2","Type":"ContainerStarted","Data":"f49f0689136f2f9944deabefd2050dabcd62dc65c56a3344aa6987feda8bef2d"}
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.953661 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55" event={"ID":"09452e7c-7e3f-4ca5-ae5c-c321e2a581ee","Type":"ContainerStarted","Data":"b1747c0fcfdab9928e27e6a42ff141a35cd2b047bbcb54d5c7da1ce96d03622c"}
Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.984129 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" event={"ID":"95ec5351-b5ce-417e-ba1c-dcd76592fa6b","Type":"ContainerStarted","Data":"d64e2cdcfd4c0ba5263e1b0a63842be02c367bc0edeb64f2db7687b851440e01"} Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.988675 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" event={"ID":"f86ad598-06ae-4ff1-90fc-1770d3b9797c","Type":"ContainerStarted","Data":"09cb37345280233714bda832076c29f0aa8ad5cd80ed2b860aa6fc6721dcd4db"} Sep 29 21:39:23 crc kubenswrapper[4911]: I0929 21:39:23.992597 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw" event={"ID":"afd159c8-b67d-46c6-8417-16f505314359","Type":"ContainerStarted","Data":"7016f1e89ba0915659eb3ceb3394e9aa7c77caa9d7d7ae05dca1d7f0013d4ce7"} Sep 29 21:39:24 crc kubenswrapper[4911]: I0929 21:39:23.996245 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f" event={"ID":"0ffadab1-e334-4ec5-8e50-2a589230e880","Type":"ContainerStarted","Data":"b25683789b4abb33690bddb244ecb4098d395b3d8128fcb1103dcc83482d43a1"} Sep 29 21:39:24 crc kubenswrapper[4911]: I0929 21:39:24.006957 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4" event={"ID":"4365babc-aa2d-4609-880b-f036dc6c367b","Type":"ContainerStarted","Data":"46da661e9fc4e0cb09bce6182eed2ef1ea775305792cf4d7bdad5a8f507e3e42"} Sep 29 21:39:24 crc kubenswrapper[4911]: I0929 21:39:24.014745 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j" event={"ID":"0b825f85-6069-4d69-b1a8-9404542556cb","Type":"ContainerStarted","Data":"a69194cfbefc1022a7925745d1811ea4b79dc7f0e05a560cd203db452f02ae01"} Sep 29 21:39:24 crc kubenswrapper[4911]: I0929 21:39:24.029420 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" event={"ID":"3dff6b09-12d0-462d-8558-175673f2ee0e","Type":"ContainerStarted","Data":"d6ceffeef1db1329a9dc95995771b15b199a90545373fae6d67e43d8c389c382"} Sep 29 21:39:24 crc kubenswrapper[4911]: I0929 21:39:24.030854 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744" event={"ID":"e6468afc-47e4-4281-9049-9209a4eb8d73","Type":"ContainerStarted","Data":"c1feef2d01a59d47650ecb05e668171b72a76cf6755a86d80e1dd1c5af486f99"} Sep 29 21:39:24 crc kubenswrapper[4911]: I0929 21:39:24.036046 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq" event={"ID":"96036e4c-a554-4d6c-8cd8-ef098c91f3a5","Type":"ContainerStarted","Data":"0ee9c960bb2794ef3b98b1416e43aa1b02982a6d075c5882ed3a540789602103"} Sep 29 21:39:24 crc kubenswrapper[4911]: I0929 21:39:24.055006 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg" event={"ID":"73f9cfcd-2c46-4070-9734-4ec07c824a9f","Type":"ContainerStarted","Data":"2db55358c5fe86769d54a3009f12dda03b32a1e19b0d8028f0589b5dbd5e41a7"} Sep 29 21:39:24 crc kubenswrapper[4911]: E0929 21:39:24.061934 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: 
\"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq" podUID="96036e4c-a554-4d6c-8cd8-ef098c91f3a5" Sep 29 21:39:24 crc kubenswrapper[4911]: E0929 21:39:24.066931 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" podUID="89b4d71d-9da4-43af-a8a9-54c89c771c22" Sep 29 21:39:24 crc kubenswrapper[4911]: E0929 21:39:24.126246 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" podUID="a9b7bc05-a0dc-421b-9e13-b00f3b8759f2" Sep 29 21:39:24 crc kubenswrapper[4911]: E0929 21:39:24.137990 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" podUID="95ec5351-b5ce-417e-ba1c-dcd76592fa6b" Sep 29 21:39:24 crc kubenswrapper[4911]: E0929 21:39:24.148405 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" podUID="f86ad598-06ae-4ff1-90fc-1770d3b9797c" Sep 29 21:39:25 crc kubenswrapper[4911]: I0929 21:39:25.067032 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" event={"ID":"97810e9c-ad62-45d0-a644-1362696f0087","Type":"ContainerStarted","Data":"3512508c44d7152dd85688ebfce647a4cf03e08271ae24b1bd993287c6b37397"} Sep 29 21:39:25 crc kubenswrapper[4911]: E0929 21:39:25.069628 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2\\\"\"" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" podUID="97810e9c-ad62-45d0-a644-1362696f0087" Sep 29 21:39:25 crc kubenswrapper[4911]: I0929 21:39:25.076041 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" event={"ID":"f86ad598-06ae-4ff1-90fc-1770d3b9797c","Type":"ContainerStarted","Data":"138ff7c7ea507b4f1b7cda5e2848f09852cf59bd745877d70b19fc1a93babaed"} Sep 29 21:39:25 crc kubenswrapper[4911]: I0929 21:39:25.078727 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" event={"ID":"95ec5351-b5ce-417e-ba1c-dcd76592fa6b","Type":"ContainerStarted","Data":"baaad150d460332494028c7f97ccb4399727b1047cdebe0b1fbb8349a2a96590"} Sep 29 21:39:25 crc kubenswrapper[4911]: E0929 21:39:25.079821 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" 
pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" podUID="f86ad598-06ae-4ff1-90fc-1770d3b9797c" Sep 29 21:39:25 crc kubenswrapper[4911]: E0929 21:39:25.080206 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" podUID="95ec5351-b5ce-417e-ba1c-dcd76592fa6b" Sep 29 21:39:25 crc kubenswrapper[4911]: I0929 21:39:25.081704 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" event={"ID":"89b4d71d-9da4-43af-a8a9-54c89c771c22","Type":"ContainerStarted","Data":"30ac7a3ebc7c2f1752598ca916252283f32b7edb2fa5d0a1d000dbd4260767a0"} Sep 29 21:39:25 crc kubenswrapper[4911]: E0929 21:39:25.083646 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:485df5c7813cdf4cf21f48ec48c8e3e4962fee6a1ae4c64f7af127d5ab346a10\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" podUID="89b4d71d-9da4-43af-a8a9-54c89c771c22" Sep 29 21:39:25 crc kubenswrapper[4911]: I0929 21:39:25.085535 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" event={"ID":"2eba2279-aec7-4ad5-83d7-2ffd190b17e6","Type":"ContainerStarted","Data":"808161e61859fe93ae53eb846af804cfbcecdef4a15b9bff605efcda63a73ea1"} Sep 29 21:39:25 crc kubenswrapper[4911]: I0929 21:39:25.086115 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" Sep 29 21:39:25 crc kubenswrapper[4911]: I0929 21:39:25.086301 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" event={"ID":"2eba2279-aec7-4ad5-83d7-2ffd190b17e6","Type":"ContainerStarted","Data":"cd880736dd9d1952e19f9159c0d8af8c8b4bee34324c687341313f7167374eb3"} Sep 29 21:39:25 crc kubenswrapper[4911]: I0929 21:39:25.086313 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" event={"ID":"2eba2279-aec7-4ad5-83d7-2ffd190b17e6","Type":"ContainerStarted","Data":"3fa83503f3f7ade958e832317a8c176bf923c79ec8ff87300450c03ae2067738"} Sep 29 21:39:25 crc kubenswrapper[4911]: I0929 21:39:25.097819 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" event={"ID":"66254365-9a3e-4101-beff-6bcbdfe57222","Type":"ContainerStarted","Data":"d4fb150f008a7761cec35ea7818b8ad6dd1d94e6549036517c5397668b9ab09f"} Sep 29 21:39:25 crc kubenswrapper[4911]: E0929 21:39:25.104083 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:15d7b5a365350a831ca59d984df67fadeccf89d599e487a7597b105afb82ce4a\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" podUID="66254365-9a3e-4101-beff-6bcbdfe57222" Sep 29 21:39:25 crc kubenswrapper[4911]: I0929 21:39:25.106427 
4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" event={"ID":"a9b7bc05-a0dc-421b-9e13-b00f3b8759f2","Type":"ContainerStarted","Data":"a5e3afc0a9d4fa356a5fcbbbb6c8af173482135f2e4cc1ea982d8a451cdcfe78"} Sep 29 21:39:25 crc kubenswrapper[4911]: E0929 21:39:25.108254 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq" podUID="96036e4c-a554-4d6c-8cd8-ef098c91f3a5" Sep 29 21:39:25 crc kubenswrapper[4911]: E0929 21:39:25.108542 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:f5f0d2eb534f763cf6578af513add1c21c1659b2cd75214dfddfedb9eebf6397\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" podUID="571a100e-c479-4a69-a8e3-8c7b1abe1bc5" Sep 29 21:39:25 crc kubenswrapper[4911]: E0929 21:39:25.109533 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" podUID="a9b7bc05-a0dc-421b-9e13-b00f3b8759f2" Sep 29 21:39:25 crc kubenswrapper[4911]: I0929 21:39:25.132959 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" podStartSLOduration=4.13294109 podStartE2EDuration="4.13294109s" podCreationTimestamp="2025-09-29 21:39:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:39:25.13228318 +0000 UTC m=+843.109395871" watchObservedRunningTime="2025-09-29 21:39:25.13294109 +0000 UTC m=+843.110053761" Sep 29 21:39:26 crc kubenswrapper[4911]: E0929 21:39:26.119451 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2\\\"\"" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" podUID="97810e9c-ad62-45d0-a644-1362696f0087" Sep 29 21:39:26 crc kubenswrapper[4911]: E0929 21:39:26.120825 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" podUID="95ec5351-b5ce-417e-ba1c-dcd76592fa6b" Sep 29 21:39:26 crc kubenswrapper[4911]: E0929 21:39:26.120834 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:15d7b5a365350a831ca59d984df67fadeccf89d599e487a7597b105afb82ce4a\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" podUID="66254365-9a3e-4101-beff-6bcbdfe57222" Sep 29 21:39:26 crc kubenswrapper[4911]: E0929 21:39:26.120916 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" podUID="a9b7bc05-a0dc-421b-9e13-b00f3b8759f2" Sep 29 21:39:26 crc kubenswrapper[4911]: E0929 21:39:26.121011 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" podUID="f86ad598-06ae-4ff1-90fc-1770d3b9797c" Sep 29 21:39:26 crc kubenswrapper[4911]: E0929 21:39:26.121148 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:485df5c7813cdf4cf21f48ec48c8e3e4962fee6a1ae4c64f7af127d5ab346a10\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" podUID="89b4d71d-9da4-43af-a8a9-54c89c771c22" Sep 29 21:39:33 crc kubenswrapper[4911]: I0929 21:39:33.281172 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7f894b9c96-4p2kz" Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.232382 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9" event={"ID":"10f051aa-3e26-4a9b-89a1-d5bd8e58ba16","Type":"ContainerStarted","Data":"8f225fd0c8ded0cf04dff1f27a378f4d72384615660795fe6c944a013d505558"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.252040 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln" event={"ID":"9049bfc1-ea91-4483-9f36-3ffaa5f250c7","Type":"ContainerStarted","Data":"22844b74c76e926a1cd59c97271c42ad77c268bb585ac40b13601c22557e9cbd"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.285051 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4" event={"ID":"4365babc-aa2d-4609-880b-f036dc6c367b","Type":"ContainerStarted","Data":"7e0745160a19bc18acb7f1f0c3fe93d1fca47103a0c1a465e2f98ffc3d72242a"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.288892 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd" event={"ID":"7b9b9966-b82d-481d-9ca9-062a883ffd1c","Type":"ContainerStarted","Data":"15f73fc03bf11404e76dd8591caea6d463e04d69fb0232be3a77dc1f4d58b6ca"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.300573 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" 
event={"ID":"3dff6b09-12d0-462d-8558-175673f2ee0e","Type":"ContainerStarted","Data":"b12b37f2fe6c838cbd6f9dbb84e3aa747085fe56b5fe9144502f171c0adbd9ce"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.301824 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" event={"ID":"e6607b81-fac2-4fb0-a19c-a4b01eef9fd2","Type":"ContainerStarted","Data":"518aa5cf06728ff14f8e8a1b86f71023bceb25898d93974ec431e2d3426eba18"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.313994 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw" event={"ID":"afd159c8-b67d-46c6-8417-16f505314359","Type":"ContainerStarted","Data":"2d0362161ca2157cc0e09e267cde7daa0c0c68261ed798baf9514e4c5fc86ed5"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.332667 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744" event={"ID":"e6468afc-47e4-4281-9049-9209a4eb8d73","Type":"ContainerStarted","Data":"8213fc998287d1801a04ed3a9da70135f6fd9a79311034e1987150b8c3c90c95"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.349043 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v" event={"ID":"a9ab461d-94d1-487f-9854-a5e7e80f88ed","Type":"ContainerStarted","Data":"534b39a34ff002c2c2a13ee6cb2d137aa0021f62396c17dd44846c80d04fdf16"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.349098 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v" event={"ID":"a9ab461d-94d1-487f-9854-a5e7e80f88ed","Type":"ContainerStarted","Data":"8cd2cc688905ec446e71726289c28f24b13e529474b8433cfe1fbe9c2f66d4f4"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.349986 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v" Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.360109 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j" event={"ID":"0b825f85-6069-4d69-b1a8-9404542556cb","Type":"ContainerStarted","Data":"36d7bccee1af2ae68f465496eef64594b10e3bf7bd2c5d904ad73c5829c85a8d"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.370982 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk" event={"ID":"c30e2b8c-3f72-4e6f-a5e4-ca27d53f5194","Type":"ContainerStarted","Data":"d1521f9ca7eaa4bea7191d3df780234039ec28d73222e047c6382c1519c1e4da"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.372338 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55" event={"ID":"09452e7c-7e3f-4ca5-ae5c-c321e2a581ee","Type":"ContainerStarted","Data":"3cf5023f044f0412f6c016a871cc22cb29242e09c2befec2d11f9d986a3917b4"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.389075 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg" event={"ID":"73f9cfcd-2c46-4070-9734-4ec07c824a9f","Type":"ContainerStarted","Data":"8b667757fa8472e2c51d665a2c13a9828b34ac54698adab4111ef0d8d292a9c8"} Sep 29 21:39:35 crc kubenswrapper[4911]: 
I0929 21:39:35.389115 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg" event={"ID":"73f9cfcd-2c46-4070-9734-4ec07c824a9f","Type":"ContainerStarted","Data":"4d86721319217f946d7a99ec6f5b0dfaa1856e7b5b3da4c3e418c33c7389ab74"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.389680 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg" Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.399826 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v" podStartSLOduration=4.132816388 podStartE2EDuration="15.39980679s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:22.483701717 +0000 UTC m=+840.460814387" lastFinishedPulling="2025-09-29 21:39:33.750692118 +0000 UTC m=+851.727804789" observedRunningTime="2025-09-29 21:39:35.392881214 +0000 UTC m=+853.369993885" watchObservedRunningTime="2025-09-29 21:39:35.39980679 +0000 UTC m=+853.376919451" Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.428217 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f" event={"ID":"0ffadab1-e334-4ec5-8e50-2a589230e880","Type":"ContainerStarted","Data":"7e5948bfd20fce1d8a906e7bf58cb18e46266a8df5c8d291a5231ad8f97e079d"} Sep 29 21:39:35 crc kubenswrapper[4911]: I0929 21:39:35.439841 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg" podStartSLOduration=4.668774135 podStartE2EDuration="15.439824275s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.007281038 +0000 UTC m=+840.984393709" lastFinishedPulling="2025-09-29 21:39:33.778331178 +0000 UTC m=+851.755443849" observedRunningTime="2025-09-29 21:39:35.437917285 +0000 UTC m=+853.415029966" watchObservedRunningTime="2025-09-29 21:39:35.439824275 +0000 UTC m=+853.416936936" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.437722 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f" event={"ID":"0ffadab1-e334-4ec5-8e50-2a589230e880","Type":"ContainerStarted","Data":"b50861d53d0be081c47e293d905c3f28555e3329cedb88ad37e216922a16a529"} Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.439040 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.446994 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk" event={"ID":"c30e2b8c-3f72-4e6f-a5e4-ca27d53f5194","Type":"ContainerStarted","Data":"0f55d84f9961b54a430c5e7f9a4c1c8173bfb3356a01fcc963320a1af66ab462"} Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.447129 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.451037 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" 
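The pod_startup_latency_tracker entries record enough to reproduce both durations: in the barbican-operator entry, watchObservedRunningTime minus podCreationTimestamp gives the E2E figure, and subtracting the pull window (lastFinishedPulling minus firstStartedPulling) yields the SLO figure, to within a nanosecond of what the kubelet logged. A small Go sketch of that arithmetic, using the values copied from that entry (assumed relationship, inferred from the numbers rather than from kubelet source):

    package main

    import (
        "fmt"
        "time"
    )

    const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

    func mustParse(s string) time.Time {
        t, err := time.Parse(layout, s)
        if err != nil {
            panic(err)
        }
        return t
    }

    func main() {
        // Values from the barbican-operator "Observed pod startup duration" entry.
        created := mustParse("2025-09-29 21:39:20 +0000 UTC")
        firstPull := mustParse("2025-09-29 21:39:22.483701717 +0000 UTC")
        lastPull := mustParse("2025-09-29 21:39:33.750692118 +0000 UTC")
        running := mustParse("2025-09-29 21:39:35.39980679 +0000 UTC")

        e2e := running.Sub(created)          // matches podStartE2EDuration="15.39980679s"
        slo := e2e - lastPull.Sub(firstPull) // pull window excluded; ~podStartSLOduration
        fmt.Println(e2e, slo)
    }

On this data the ~11.3s spent pulling images accounts for most of each pod's 15-16s end-to-end startup.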
event={"ID":"3dff6b09-12d0-462d-8558-175673f2ee0e","Type":"ContainerStarted","Data":"427ac4727737ea0cddb0f761fd1ae19f819a3eda4f8e6db2db9db15902650475"} Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.451830 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.454215 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" event={"ID":"e6607b81-fac2-4fb0-a19c-a4b01eef9fd2","Type":"ContainerStarted","Data":"7d782db4039f7109156e6ad66ae00717d7baa4db5e9554ec6565ccaa7fd4a092"} Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.454381 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.460723 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744" event={"ID":"e6468afc-47e4-4281-9049-9209a4eb8d73","Type":"ContainerStarted","Data":"0d9b792187d1eff29c7a5b969840905ebfbb11b86eac08fa780c7978851d6579"} Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.461023 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.466225 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55" event={"ID":"09452e7c-7e3f-4ca5-ae5c-c321e2a581ee","Type":"ContainerStarted","Data":"3e9ec169bc385f7b19d5f25f760ad29db46fd46957738952a26ee2164cfc9bcc"} Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.466677 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.467504 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f" podStartSLOduration=5.693813462 podStartE2EDuration="16.467483253s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.037603341 +0000 UTC m=+841.014716012" lastFinishedPulling="2025-09-29 21:39:33.811273132 +0000 UTC m=+851.788385803" observedRunningTime="2025-09-29 21:39:36.459400182 +0000 UTC m=+854.436512863" watchObservedRunningTime="2025-09-29 21:39:36.467483253 +0000 UTC m=+854.444595964" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.479185 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4" event={"ID":"4365babc-aa2d-4609-880b-f036dc6c367b","Type":"ContainerStarted","Data":"67f2cdbd2c67fab14d14aff02dfc86337ccd8dd8ddb170d9c34b97982e794fe6"} Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.479957 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.481902 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd" 
event={"ID":"7b9b9966-b82d-481d-9ca9-062a883ffd1c","Type":"ContainerStarted","Data":"459479e64d1cf122724df3de6793af1f70e31be197ceef9e6855fc362f145e0b"} Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.482395 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.484965 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j" event={"ID":"0b825f85-6069-4d69-b1a8-9404542556cb","Type":"ContainerStarted","Data":"9e7f79083a1bbf7a4fc6930a2a265c192bf47e1c89e7ee3e470b8439ae428abb"} Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.485087 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.486583 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9" event={"ID":"10f051aa-3e26-4a9b-89a1-d5bd8e58ba16","Type":"ContainerStarted","Data":"4dccb6e62e3e4600b5c1254a27b251a0ab6bf5268ff444360c6a35d1c608cf9d"} Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.490440 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln" event={"ID":"9049bfc1-ea91-4483-9f36-3ffaa5f250c7","Type":"ContainerStarted","Data":"d7c5f42822a2734ba8577d7199d63c0faf2b8c15cb83ce2dd4238a41bec7855f"} Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.490605 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.491834 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" podStartSLOduration=6.033639159 podStartE2EDuration="16.491816911s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.349682534 +0000 UTC m=+841.326795205" lastFinishedPulling="2025-09-29 21:39:33.807860286 +0000 UTC m=+851.784972957" observedRunningTime="2025-09-29 21:39:36.490729157 +0000 UTC m=+854.467841828" watchObservedRunningTime="2025-09-29 21:39:36.491816911 +0000 UTC m=+854.468929602" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.492671 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw" event={"ID":"afd159c8-b67d-46c6-8417-16f505314359","Type":"ContainerStarted","Data":"a6561a920c81519de7ce4f1df2bcf7f36f1d254b90cf4cfc1d869c0723ca9c8d"} Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.512907 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" podStartSLOduration=5.660830224 podStartE2EDuration="16.512887967s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.017908756 +0000 UTC m=+840.995021427" lastFinishedPulling="2025-09-29 21:39:33.869966489 +0000 UTC m=+851.847079170" observedRunningTime="2025-09-29 21:39:36.508906762 +0000 UTC m=+854.486019443" watchObservedRunningTime="2025-09-29 21:39:36.512887967 +0000 UTC m=+854.490000658" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 
21:39:36.524981 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk" podStartSLOduration=5.736145101 podStartE2EDuration="16.524962372s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.054841689 +0000 UTC m=+841.031954360" lastFinishedPulling="2025-09-29 21:39:33.84365896 +0000 UTC m=+851.820771631" observedRunningTime="2025-09-29 21:39:36.524540919 +0000 UTC m=+854.501653600" watchObservedRunningTime="2025-09-29 21:39:36.524962372 +0000 UTC m=+854.502075053" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.551391 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744" podStartSLOduration=4.757510061 podStartE2EDuration="15.551374184s" podCreationTimestamp="2025-09-29 21:39:21 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.016330555 +0000 UTC m=+840.993443226" lastFinishedPulling="2025-09-29 21:39:33.810194668 +0000 UTC m=+851.787307349" observedRunningTime="2025-09-29 21:39:36.545986985 +0000 UTC m=+854.523099706" watchObservedRunningTime="2025-09-29 21:39:36.551374184 +0000 UTC m=+854.528486855" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.566133 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55" podStartSLOduration=5.819629148 podStartE2EDuration="16.566116132s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.024704871 +0000 UTC m=+841.001817542" lastFinishedPulling="2025-09-29 21:39:33.771191835 +0000 UTC m=+851.748304526" observedRunningTime="2025-09-29 21:39:36.561069135 +0000 UTC m=+854.538181816" watchObservedRunningTime="2025-09-29 21:39:36.566116132 +0000 UTC m=+854.543228823" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.578587 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw" podStartSLOduration=5.741600905 podStartE2EDuration="16.578565479s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:22.971630085 +0000 UTC m=+840.948742756" lastFinishedPulling="2025-09-29 21:39:33.808594659 +0000 UTC m=+851.785707330" observedRunningTime="2025-09-29 21:39:36.575968728 +0000 UTC m=+854.553081399" watchObservedRunningTime="2025-09-29 21:39:36.578565479 +0000 UTC m=+854.555678150" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.599560 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd" podStartSLOduration=5.431324839 podStartE2EDuration="16.599539212s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:22.64373658 +0000 UTC m=+840.620849251" lastFinishedPulling="2025-09-29 21:39:33.811950953 +0000 UTC m=+851.789063624" observedRunningTime="2025-09-29 21:39:36.5949652 +0000 UTC m=+854.572077891" watchObservedRunningTime="2025-09-29 21:39:36.599539212 +0000 UTC m=+854.576651903" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.616683 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln" podStartSLOduration=5.755905537 podStartE2EDuration="16.616667915s" podCreationTimestamp="2025-09-29 
21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:22.949232824 +0000 UTC m=+840.926345505" lastFinishedPulling="2025-09-29 21:39:33.809995212 +0000 UTC m=+851.787107883" observedRunningTime="2025-09-29 21:39:36.611228885 +0000 UTC m=+854.588341566" watchObservedRunningTime="2025-09-29 21:39:36.616667915 +0000 UTC m=+854.593780586" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.629246 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4" podStartSLOduration=5.21271866 podStartE2EDuration="15.629228896s" podCreationTimestamp="2025-09-29 21:39:21 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.348781936 +0000 UTC m=+841.325894607" lastFinishedPulling="2025-09-29 21:39:33.765292172 +0000 UTC m=+851.742404843" observedRunningTime="2025-09-29 21:39:36.624074935 +0000 UTC m=+854.601187616" watchObservedRunningTime="2025-09-29 21:39:36.629228896 +0000 UTC m=+854.606341587" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.648693 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9" podStartSLOduration=5.334319747 podStartE2EDuration="16.64866914s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:22.484398639 +0000 UTC m=+840.461511310" lastFinishedPulling="2025-09-29 21:39:33.798748032 +0000 UTC m=+851.775860703" observedRunningTime="2025-09-29 21:39:36.646003577 +0000 UTC m=+854.623116268" watchObservedRunningTime="2025-09-29 21:39:36.64866914 +0000 UTC m=+854.625781831" Sep 29 21:39:36 crc kubenswrapper[4911]: I0929 21:39:36.670104 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j" podStartSLOduration=5.798698505 podStartE2EDuration="16.670087867s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:22.928543967 +0000 UTC m=+840.905656648" lastFinishedPulling="2025-09-29 21:39:33.799933339 +0000 UTC m=+851.777046010" observedRunningTime="2025-09-29 21:39:36.664970487 +0000 UTC m=+854.642083168" watchObservedRunningTime="2025-09-29 21:39:36.670087867 +0000 UTC m=+854.647200548" Sep 29 21:39:37 crc kubenswrapper[4911]: I0929 21:39:37.503399 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9" Sep 29 21:39:37 crc kubenswrapper[4911]: I0929 21:39:37.503471 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.006747 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-82p2v" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.057611 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-b8mfd" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.079651 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-ssgn9" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.108228 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/infra-operator-controller-manager-7d857cc749-ps6zb" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.153080 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2rlqk" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.186432 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cnn7f" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.233253 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-l6zpg" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.296108 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-96r7j" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.370428 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-vdx55" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.394678 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-lvplw" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.433843 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zh4ln" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.512447 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-66c64d68d6-mn744" Sep 29 21:39:41 crc kubenswrapper[4911]: I0929 21:39:41.629212 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gcq4" Sep 29 21:39:42 crc kubenswrapper[4911]: I0929 21:39:42.016605 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-j9x29" Sep 29 21:39:45 crc kubenswrapper[4911]: I0929 21:39:45.604207 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" event={"ID":"97810e9c-ad62-45d0-a644-1362696f0087","Type":"ContainerStarted","Data":"d5be94088cacb23468e7ed8c4a9213dbcd83fa10cdbaaba13219064e323cba0b"} Sep 29 21:39:45 crc kubenswrapper[4911]: I0929 21:39:45.605654 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" Sep 29 21:39:45 crc kubenswrapper[4911]: I0929 21:39:45.610438 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" event={"ID":"66254365-9a3e-4101-beff-6bcbdfe57222","Type":"ContainerStarted","Data":"891f8850dc931551f859b48d3f6e1d0ffd1e37e31ebde36f1150734a10ec336c"} Sep 29 21:39:45 crc kubenswrapper[4911]: I0929 21:39:45.610971 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" Sep 29 21:39:45 crc kubenswrapper[4911]: I0929 21:39:45.616505 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" 
event={"ID":"571a100e-c479-4a69-a8e3-8c7b1abe1bc5","Type":"ContainerStarted","Data":"bb4d83122cd5d39d84f3ceafcf861f68d84e3d40af6f4cdb1f76ed4bb637158e"} Sep 29 21:39:45 crc kubenswrapper[4911]: I0929 21:39:45.617040 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" Sep 29 21:39:45 crc kubenswrapper[4911]: I0929 21:39:45.632183 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" podStartSLOduration=3.7949390750000003 podStartE2EDuration="25.632166626s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.36435133 +0000 UTC m=+841.341464001" lastFinishedPulling="2025-09-29 21:39:45.201578871 +0000 UTC m=+863.178691552" observedRunningTime="2025-09-29 21:39:45.631762393 +0000 UTC m=+863.608875074" watchObservedRunningTime="2025-09-29 21:39:45.632166626 +0000 UTC m=+863.609279307" Sep 29 21:39:45 crc kubenswrapper[4911]: I0929 21:39:45.654423 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" podStartSLOduration=6.08455415 podStartE2EDuration="25.654403398s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.375654289 +0000 UTC m=+841.352766960" lastFinishedPulling="2025-09-29 21:39:42.945503537 +0000 UTC m=+860.922616208" observedRunningTime="2025-09-29 21:39:45.65032325 +0000 UTC m=+863.627435921" watchObservedRunningTime="2025-09-29 21:39:45.654403398 +0000 UTC m=+863.631516069" Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.625512 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" event={"ID":"f86ad598-06ae-4ff1-90fc-1770d3b9797c","Type":"ContainerStarted","Data":"87a5b9a82a69a21247f95cf3257f0704fa2df30488961db09e22616cde770ec4"} Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.625781 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.627133 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq" event={"ID":"96036e4c-a554-4d6c-8cd8-ef098c91f3a5","Type":"ContainerStarted","Data":"465f240adbac41389126a79fe1ce6f9da5a28d8e9ad85f6988e659189d567a06"} Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.629492 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" event={"ID":"95ec5351-b5ce-417e-ba1c-dcd76592fa6b","Type":"ContainerStarted","Data":"716a1d47f44162927a3a589c02bd23f783d9057e6aa42e7ff38b3bf306dc94ac"} Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.629952 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.631548 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" event={"ID":"a9b7bc05-a0dc-421b-9e13-b00f3b8759f2","Type":"ContainerStarted","Data":"8d634df8928c37bd6d72687100615feffd60dd306b70fa0e1e95cc65e59d07c6"} Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.632003 4911 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.634101 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" event={"ID":"89b4d71d-9da4-43af-a8a9-54c89c771c22","Type":"ContainerStarted","Data":"dd4a4f92886876ba41fa36126d83a70321746a0510420a0d5c44d95a5c256ca4"} Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.634843 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.646054 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" podStartSLOduration=6.258342875 podStartE2EDuration="26.646036206s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.054872369 +0000 UTC m=+841.031985040" lastFinishedPulling="2025-09-29 21:39:43.44256569 +0000 UTC m=+861.419678371" observedRunningTime="2025-09-29 21:39:45.66603905 +0000 UTC m=+863.643151731" watchObservedRunningTime="2025-09-29 21:39:46.646036206 +0000 UTC m=+864.623148887" Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.648037 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" podStartSLOduration=3.790008274 podStartE2EDuration="25.648029668s" podCreationTimestamp="2025-09-29 21:39:21 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.383999845 +0000 UTC m=+841.361112516" lastFinishedPulling="2025-09-29 21:39:45.242021219 +0000 UTC m=+863.219133910" observedRunningTime="2025-09-29 21:39:46.642083253 +0000 UTC m=+864.619195944" watchObservedRunningTime="2025-09-29 21:39:46.648029668 +0000 UTC m=+864.625142359" Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.662402 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" podStartSLOduration=3.8364287470000003 podStartE2EDuration="25.662374364s" podCreationTimestamp="2025-09-29 21:39:21 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.382302241 +0000 UTC m=+841.359414912" lastFinishedPulling="2025-09-29 21:39:45.208247848 +0000 UTC m=+863.185360529" observedRunningTime="2025-09-29 21:39:46.659948479 +0000 UTC m=+864.637061190" watchObservedRunningTime="2025-09-29 21:39:46.662374364 +0000 UTC m=+864.639487075" Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.683302 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-nh8dq" podStartSLOduration=3.758606374 podStartE2EDuration="25.683279425s" podCreationTimestamp="2025-09-29 21:39:21 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.362455551 +0000 UTC m=+841.339568222" lastFinishedPulling="2025-09-29 21:39:45.287128562 +0000 UTC m=+863.264241273" observedRunningTime="2025-09-29 21:39:46.677930148 +0000 UTC m=+864.655042869" watchObservedRunningTime="2025-09-29 21:39:46.683279425 +0000 UTC m=+864.660392106" Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.706185 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" 
podStartSLOduration=4.873196672 podStartE2EDuration="26.706161626s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.353709473 +0000 UTC m=+841.330822144" lastFinishedPulling="2025-09-29 21:39:45.186674387 +0000 UTC m=+863.163787098" observedRunningTime="2025-09-29 21:39:46.699652974 +0000 UTC m=+864.676765675" watchObservedRunningTime="2025-09-29 21:39:46.706161626 +0000 UTC m=+864.683274317" Sep 29 21:39:46 crc kubenswrapper[4911]: I0929 21:39:46.730065 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" podStartSLOduration=4.807601837 podStartE2EDuration="26.73004709s" podCreationTimestamp="2025-09-29 21:39:20 +0000 UTC" firstStartedPulling="2025-09-29 21:39:23.361715617 +0000 UTC m=+841.338828288" lastFinishedPulling="2025-09-29 21:39:45.28416085 +0000 UTC m=+863.261273541" observedRunningTime="2025-09-29 21:39:46.723917059 +0000 UTC m=+864.701029760" watchObservedRunningTime="2025-09-29 21:39:46.73004709 +0000 UTC m=+864.707159771" Sep 29 21:39:51 crc kubenswrapper[4911]: I0929 21:39:51.109187 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-fgfrh" Sep 29 21:39:51 crc kubenswrapper[4911]: I0929 21:39:51.244256 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-88c7-zrp9j" Sep 29 21:39:51 crc kubenswrapper[4911]: I0929 21:39:51.260838 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-dj2hs" Sep 29 21:39:51 crc kubenswrapper[4911]: I0929 21:39:51.321981 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-hm6c8" Sep 29 21:39:51 crc kubenswrapper[4911]: I0929 21:39:51.406954 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-dbm6k" Sep 29 21:39:51 crc kubenswrapper[4911]: I0929 21:39:51.445345 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-k6296" Sep 29 21:39:51 crc kubenswrapper[4911]: I0929 21:39:51.617604 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-f66b554c6-d6xcc" Sep 29 21:39:55 crc kubenswrapper[4911]: I0929 21:39:55.211687 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:39:55 crc kubenswrapper[4911]: I0929 21:39:55.212082 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.784120 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4vjqh"] Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 
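The machine-config-daemon liveness failure above is a plain connection refusal on http://127.0.0.1:8798/health. A rough Go approximation of such an HTTP check, in which a refused connection, a timeout, or an error status all count as failure (the 1s timeout is an assumption; kubelet treats 2xx/3xx responses as success):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func probe(url string) error {
        client := &http.Client{Timeout: 1 * time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return err // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("unexpected status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        if err := probe("http://127.0.0.1:8798/health"); err != nil {
            fmt.Println("failure:", err)
        } else {
            fmt.Println("success")
        }
    }

A single failure like the one logged here does not restart the container by itself; the kubelet only acts once the probe's failure threshold is crossed.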
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.790148 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.790276 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-9mz25"
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.790454 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.790501 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.796801 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4vjqh"]
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.864367 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-2cxwm"]
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.865600 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm"
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.868303 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.876867 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-2cxwm"]
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.916748 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-2cxwm\" (UID: \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm"
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.916814 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g6kg\" (UniqueName: \"kubernetes.io/projected/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-kube-api-access-4g6kg\") pod \"dnsmasq-dns-78dd6ddcc-2cxwm\" (UID: \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm"
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.917080 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5glp\" (UniqueName: \"kubernetes.io/projected/9ba51b2f-ce7e-4d51-b30c-0917b93a2167-kube-api-access-n5glp\") pod \"dnsmasq-dns-675f4bcbfc-4vjqh\" (UID: \"9ba51b2f-ce7e-4d51-b30c-0917b93a2167\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4vjqh"
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.917209 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ba51b2f-ce7e-4d51-b30c-0917b93a2167-config\") pod \"dnsmasq-dns-675f4bcbfc-4vjqh\" (UID: \"9ba51b2f-ce7e-4d51-b30c-0917b93a2167\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4vjqh"
Sep 29 21:40:09 crc kubenswrapper[4911]: I0929 21:40:09.917333 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-config\") pod \"dnsmasq-dns-78dd6ddcc-2cxwm\" (UID: \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.018630 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-config\") pod \"dnsmasq-dns-78dd6ddcc-2cxwm\" (UID: \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.018698 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-2cxwm\" (UID: \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.018720 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g6kg\" (UniqueName: \"kubernetes.io/projected/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-kube-api-access-4g6kg\") pod \"dnsmasq-dns-78dd6ddcc-2cxwm\" (UID: \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.018777 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5glp\" (UniqueName: \"kubernetes.io/projected/9ba51b2f-ce7e-4d51-b30c-0917b93a2167-kube-api-access-n5glp\") pod \"dnsmasq-dns-675f4bcbfc-4vjqh\" (UID: \"9ba51b2f-ce7e-4d51-b30c-0917b93a2167\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4vjqh"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.018810 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ba51b2f-ce7e-4d51-b30c-0917b93a2167-config\") pod \"dnsmasq-dns-675f4bcbfc-4vjqh\" (UID: \"9ba51b2f-ce7e-4d51-b30c-0917b93a2167\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4vjqh"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.019594 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ba51b2f-ce7e-4d51-b30c-0917b93a2167-config\") pod \"dnsmasq-dns-675f4bcbfc-4vjqh\" (UID: \"9ba51b2f-ce7e-4d51-b30c-0917b93a2167\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4vjqh"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.020163 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-config\") pod \"dnsmasq-dns-78dd6ddcc-2cxwm\" (UID: \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.020686 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-2cxwm\" (UID: \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.037250 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g6kg\" (UniqueName: \"kubernetes.io/projected/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-kube-api-access-4g6kg\") pod \"dnsmasq-dns-78dd6ddcc-2cxwm\" (UID: \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.037905 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5glp\" (UniqueName: \"kubernetes.io/projected/9ba51b2f-ce7e-4d51-b30c-0917b93a2167-kube-api-access-n5glp\") pod \"dnsmasq-dns-675f4bcbfc-4vjqh\" (UID: \"9ba51b2f-ce7e-4d51-b30c-0917b93a2167\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4vjqh"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.103562 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4vjqh"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.189286 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm"
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.467219 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-2cxwm"]
Sep 29 21:40:10 crc kubenswrapper[4911]: W0929 21:40:10.471853 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9c7e4f7_ea6f_4062_8215_f99cd085f5c0.slice/crio-2229db9d4e8e4ea42e174854ff9108b93820bc0c53cbf1f9801078a175c1ab00 WatchSource:0}: Error finding container 2229db9d4e8e4ea42e174854ff9108b93820bc0c53cbf1f9801078a175c1ab00: Status 404 returned error can't find the container with id 2229db9d4e8e4ea42e174854ff9108b93820bc0c53cbf1f9801078a175c1ab00
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.652796 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4vjqh"]
Sep 29 21:40:10 crc kubenswrapper[4911]: W0929 21:40:10.656230 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ba51b2f_ce7e_4d51_b30c_0917b93a2167.slice/crio-b25560403965ce5bf6cfff6505d90f95d03ea7c65e15f9c5c2f5916ee008bb93 WatchSource:0}: Error finding container b25560403965ce5bf6cfff6505d90f95d03ea7c65e15f9c5c2f5916ee008bb93: Status 404 returned error can't find the container with id b25560403965ce5bf6cfff6505d90f95d03ea7c65e15f9c5c2f5916ee008bb93
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.855896 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm" event={"ID":"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0","Type":"ContainerStarted","Data":"2229db9d4e8e4ea42e174854ff9108b93820bc0c53cbf1f9801078a175c1ab00"}
Sep 29 21:40:10 crc kubenswrapper[4911]: I0929 21:40:10.857995 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-4vjqh" event={"ID":"9ba51b2f-ce7e-4d51-b30c-0917b93a2167","Type":"ContainerStarted","Data":"b25560403965ce5bf6cfff6505d90f95d03ea7c65e15f9c5c2f5916ee008bb93"}
Sep 29 21:40:12 crc kubenswrapper[4911]: I0929 21:40:12.749112 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4vjqh"]
Sep 29 21:40:12 crc kubenswrapper[4911]: I0929 21:40:12.776239 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-xd7cs"]
Sep 29 21:40:12 crc kubenswrapper[4911]: I0929 21:40:12.777342 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-xd7cs"
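The reconciler entries above show the dnsmasq pods' volumes: all of them are ConfigMap-backed ("config", "dns-svc") or projected service-account tokens. The pod manifests themselves are not in this log, but the volume declarations implied by these mounts would look roughly like the following sketch against the k8s.io/api types (the ConfigMap names "dns" and "dns-svc" come from the reflector cache lines; everything else here is assumed):

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    // dnsmasqVolumes approximates the two kubernetes.io/configmap volumes
    // that the reconciler is attaching and mounting above.
    func dnsmasqVolumes() []corev1.Volume {
        return []corev1.Volume{
            {
                Name: "config",
                VolumeSource: corev1.VolumeSource{
                    ConfigMap: &corev1.ConfigMapVolumeSource{
                        LocalObjectReference: corev1.LocalObjectReference{Name: "dns"},
                    },
                },
            },
            {
                Name: "dns-svc",
                VolumeSource: corev1.VolumeSource{
                    ConfigMap: &corev1.ConfigMapVolumeSource{
                        LocalObjectReference: corev1.LocalObjectReference{Name: "dns-svc"},
                    },
                },
            },
        }
    }

    func main() {
        for _, v := range dnsmasqVolumes() {
            fmt.Println(v.Name, "->", v.ConfigMap.Name)
        }
    }

Each such volume then appears twice per pod in the log: once as VerifyControllerAttachedVolume (reconciler_common.go) and once as MountVolume.SetUp succeeded (operation_generator.go).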
Sep 29 21:40:12 crc kubenswrapper[4911]: I0929 21:40:12.790211 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-xd7cs"]
Sep 29 21:40:12 crc kubenswrapper[4911]: I0929 21:40:12.867287 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-dns-svc\") pod \"dnsmasq-dns-666b6646f7-xd7cs\" (UID: \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\") " pod="openstack/dnsmasq-dns-666b6646f7-xd7cs"
Sep 29 21:40:12 crc kubenswrapper[4911]: I0929 21:40:12.867334 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jj5s\" (UniqueName: \"kubernetes.io/projected/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-kube-api-access-2jj5s\") pod \"dnsmasq-dns-666b6646f7-xd7cs\" (UID: \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\") " pod="openstack/dnsmasq-dns-666b6646f7-xd7cs"
Sep 29 21:40:12 crc kubenswrapper[4911]: I0929 21:40:12.867448 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-config\") pod \"dnsmasq-dns-666b6646f7-xd7cs\" (UID: \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\") " pod="openstack/dnsmasq-dns-666b6646f7-xd7cs"
Sep 29 21:40:12 crc kubenswrapper[4911]: I0929 21:40:12.972706 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-dns-svc\") pod \"dnsmasq-dns-666b6646f7-xd7cs\" (UID: \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\") " pod="openstack/dnsmasq-dns-666b6646f7-xd7cs"
Sep 29 21:40:12 crc kubenswrapper[4911]: I0929 21:40:12.972767 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jj5s\" (UniqueName: \"kubernetes.io/projected/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-kube-api-access-2jj5s\") pod \"dnsmasq-dns-666b6646f7-xd7cs\" (UID: \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\") " pod="openstack/dnsmasq-dns-666b6646f7-xd7cs"
Sep 29 21:40:12 crc kubenswrapper[4911]: I0929 21:40:12.972820 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-config\") pod \"dnsmasq-dns-666b6646f7-xd7cs\" (UID: \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\") " pod="openstack/dnsmasq-dns-666b6646f7-xd7cs"
Sep 29 21:40:12 crc kubenswrapper[4911]: I0929 21:40:12.973513 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-dns-svc\") pod \"dnsmasq-dns-666b6646f7-xd7cs\" (UID: \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\") " pod="openstack/dnsmasq-dns-666b6646f7-xd7cs"
Sep 29 21:40:12 crc kubenswrapper[4911]: I0929 21:40:12.973607 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-config\") pod \"dnsmasq-dns-666b6646f7-xd7cs\" (UID: \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\") " pod="openstack/dnsmasq-dns-666b6646f7-xd7cs"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.007002 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jj5s\" (UniqueName: \"kubernetes.io/projected/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-kube-api-access-2jj5s\") pod \"dnsmasq-dns-666b6646f7-xd7cs\" (UID: \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\") " pod="openstack/dnsmasq-dns-666b6646f7-xd7cs"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.055124 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-2cxwm"]
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.076558 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-dwtnp"]
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.078082 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.097197 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-dwtnp"]
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.104555 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-xd7cs"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.174973 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-config\") pod \"dnsmasq-dns-57d769cc4f-dwtnp\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") " pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.175055 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rmnl\" (UniqueName: \"kubernetes.io/projected/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-kube-api-access-2rmnl\") pod \"dnsmasq-dns-57d769cc4f-dwtnp\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") " pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.175105 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-dwtnp\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") " pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.276738 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-dwtnp\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") " pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.276807 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-config\") pod \"dnsmasq-dns-57d769cc4f-dwtnp\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") " pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.276868 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rmnl\" (UniqueName: \"kubernetes.io/projected/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-kube-api-access-2rmnl\") pod \"dnsmasq-dns-57d769cc4f-dwtnp\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") " pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.277828 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-dwtnp\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") " pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.278315 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-config\") pod \"dnsmasq-dns-57d769cc4f-dwtnp\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") " pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.292568 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rmnl\" (UniqueName: \"kubernetes.io/projected/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-kube-api-access-2rmnl\") pod \"dnsmasq-dns-57d769cc4f-dwtnp\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") " pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.399574 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.929028 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.931747 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.936428 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.936478 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.937553 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.937619 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.937728 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.939991 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-tlxqf"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.944660 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.953628 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.986846 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.987217 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-config-data\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.987368 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.987541 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.987704 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/16704d0f-ad69-4cc9-890a-77c268d78151-pod-info\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.987876 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/16704d0f-ad69-4cc9-890a-77c268d78151-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.988031 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.988157 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-server-conf\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.988297 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.988453 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:13 crc kubenswrapper[4911]: I0929 21:40:13.988582 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cx7z\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-kube-api-access-7cx7z\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.090200 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cx7z\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-kube-api-access-7cx7z\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.090288 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.090314 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-config-data\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.090333 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.090372 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.090393 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/16704d0f-ad69-4cc9-890a-77c268d78151-pod-info\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.090410 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/16704d0f-ad69-4cc9-890a-77c268d78151-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.090423 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.090440 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-server-conf\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.090458 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-erlang-cookie\")
pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.090479 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.091431 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.091868 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-config-data\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.091922 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.092308 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.092388 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.092650 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-server-conf\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.094605 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/16704d0f-ad69-4cc9-890a-77c268d78151-pod-info\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.095998 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.098497 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.104256 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/16704d0f-ad69-4cc9-890a-77c268d78151-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.108107 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cx7z\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-kube-api-access-7cx7z\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.123998 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.182916 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.184252 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.187269 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-99ttz" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.187579 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.187700 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.187731 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.187875 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.188377 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.188528 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.189017 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.255081 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.293299 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.293364 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.293380 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54bedb11-6943-4e34-a221-8dbd2cfd5eee-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.293453 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.293495 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.293518 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.293563 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.293595 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54bedb11-6943-4e34-a221-8dbd2cfd5eee-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.293616 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" 
(UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.293736 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rhbd\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-kube-api-access-9rhbd\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.293834 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.395033 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.395066 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.395085 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.395133 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.395162 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54bedb11-6943-4e34-a221-8dbd2cfd5eee-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.395183 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.395215 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rhbd\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-kube-api-access-9rhbd\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 
21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.395236 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.395262 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.395298 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.395312 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54bedb11-6943-4e34-a221-8dbd2cfd5eee-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.396090 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.396289 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.396825 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.397334 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.397437 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.397927 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.402622 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54bedb11-6943-4e34-a221-8dbd2cfd5eee-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.404457 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54bedb11-6943-4e34-a221-8dbd2cfd5eee-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.413292 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.414701 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.415648 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.416653 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rhbd\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-kube-api-access-9rhbd\") pod \"rabbitmq-cell1-server-0\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:14 crc kubenswrapper[4911]: I0929 21:40:14.500945 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.756419 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.758780 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.768421 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.768455 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.768655 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.768683 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-fcvmz" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.768763 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.776511 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.778782 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.789909 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.792157 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.795063 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.795363 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.795591 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-db9dq" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.795874 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.796941 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.831681 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9fb504fe-401c-4b1d-af71-171d017883be-kolla-config\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.831732 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df57577d-bb06-4339-a3e6-27a2cf733d17-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.831759 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " 
pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.831780 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/df57577d-bb06-4339-a3e6-27a2cf733d17-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.831812 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9fb504fe-401c-4b1d-af71-171d017883be-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.832002 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df57577d-bb06-4339-a3e6-27a2cf733d17-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.832133 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fb504fe-401c-4b1d-af71-171d017883be-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.832498 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.832627 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/df57577d-bb06-4339-a3e6-27a2cf733d17-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.832674 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9fb504fe-401c-4b1d-af71-171d017883be-config-data-default\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.832703 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkkkk\" (UniqueName: \"kubernetes.io/projected/9fb504fe-401c-4b1d-af71-171d017883be-kube-api-access-nkkkk\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.832733 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdt68\" (UniqueName: \"kubernetes.io/projected/df57577d-bb06-4339-a3e6-27a2cf733d17-kube-api-access-zdt68\") pod \"openstack-cell1-galera-0\" (UID: 
\"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.832759 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9fb504fe-401c-4b1d-af71-171d017883be-secrets\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.832803 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/df57577d-bb06-4339-a3e6-27a2cf733d17-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.832852 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/df57577d-bb06-4339-a3e6-27a2cf733d17-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.832914 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/df57577d-bb06-4339-a3e6-27a2cf733d17-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.832959 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fb504fe-401c-4b1d-af71-171d017883be-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.833752 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9fb504fe-401c-4b1d-af71-171d017883be-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.935048 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fb504fe-401c-4b1d-af71-171d017883be-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.935927 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9fb504fe-401c-4b1d-af71-171d017883be-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.936391 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9fb504fe-401c-4b1d-af71-171d017883be-kolla-config\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" 
Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.936670 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df57577d-bb06-4339-a3e6-27a2cf733d17-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.936881 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.937049 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/df57577d-bb06-4339-a3e6-27a2cf733d17-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.937204 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9fb504fe-401c-4b1d-af71-171d017883be-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.938007 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9fb504fe-401c-4b1d-af71-171d017883be-kolla-config\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.937901 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9fb504fe-401c-4b1d-af71-171d017883be-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.937470 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.938532 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df57577d-bb06-4339-a3e6-27a2cf733d17-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.939051 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df57577d-bb06-4339-a3e6-27a2cf733d17-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.939425 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/9fb504fe-401c-4b1d-af71-171d017883be-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.939670 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fb504fe-401c-4b1d-af71-171d017883be-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.939889 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.940090 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.940102 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/df57577d-bb06-4339-a3e6-27a2cf733d17-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.940528 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9fb504fe-401c-4b1d-af71-171d017883be-config-data-default\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.940735 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkkkk\" (UniqueName: \"kubernetes.io/projected/9fb504fe-401c-4b1d-af71-171d017883be-kube-api-access-nkkkk\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.941059 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdt68\" (UniqueName: \"kubernetes.io/projected/df57577d-bb06-4339-a3e6-27a2cf733d17-kube-api-access-zdt68\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.941227 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9fb504fe-401c-4b1d-af71-171d017883be-secrets\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.941390 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/df57577d-bb06-4339-a3e6-27a2cf733d17-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " 
pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.941558 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/df57577d-bb06-4339-a3e6-27a2cf733d17-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.941741 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/df57577d-bb06-4339-a3e6-27a2cf733d17-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.941469 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9fb504fe-401c-4b1d-af71-171d017883be-config-data-default\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.942322 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/df57577d-bb06-4339-a3e6-27a2cf733d17-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.941125 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/df57577d-bb06-4339-a3e6-27a2cf733d17-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.944909 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df57577d-bb06-4339-a3e6-27a2cf733d17-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.945226 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/df57577d-bb06-4339-a3e6-27a2cf733d17-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.946610 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fb504fe-401c-4b1d-af71-171d017883be-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.949074 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/df57577d-bb06-4339-a3e6-27a2cf733d17-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.949433 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9fb504fe-401c-4b1d-af71-171d017883be-secrets\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.955755 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9fb504fe-401c-4b1d-af71-171d017883be-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.956382 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/df57577d-bb06-4339-a3e6-27a2cf733d17-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.966071 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.966895 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkkkk\" (UniqueName: \"kubernetes.io/projected/9fb504fe-401c-4b1d-af71-171d017883be-kube-api-access-nkkkk\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.970703 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdt68\" (UniqueName: \"kubernetes.io/projected/df57577d-bb06-4339-a3e6-27a2cf733d17-kube-api-access-zdt68\") pod \"openstack-cell1-galera-0\" (UID: \"df57577d-bb06-4339-a3e6-27a2cf733d17\") " pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:16 crc kubenswrapper[4911]: I0929 21:40:16.984635 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"9fb504fe-401c-4b1d-af71-171d017883be\") " pod="openstack/openstack-galera-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.093690 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.107067 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.194890 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.196047 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.198698 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.198766 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-ktcfm" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.207167 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.208562 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.247421 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5849b99d-1ca7-4258-b88a-704a89d46c4e-kolla-config\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.247485 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5849b99d-1ca7-4258-b88a-704a89d46c4e-config-data\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.247512 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5849b99d-1ca7-4258-b88a-704a89d46c4e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.247590 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5849b99d-1ca7-4258-b88a-704a89d46c4e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.247638 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shsww\" (UniqueName: \"kubernetes.io/projected/5849b99d-1ca7-4258-b88a-704a89d46c4e-kube-api-access-shsww\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.349573 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shsww\" (UniqueName: \"kubernetes.io/projected/5849b99d-1ca7-4258-b88a-704a89d46c4e-kube-api-access-shsww\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.349631 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5849b99d-1ca7-4258-b88a-704a89d46c4e-kolla-config\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.349661 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/5849b99d-1ca7-4258-b88a-704a89d46c4e-config-data\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.349678 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5849b99d-1ca7-4258-b88a-704a89d46c4e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.349741 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5849b99d-1ca7-4258-b88a-704a89d46c4e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.351042 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5849b99d-1ca7-4258-b88a-704a89d46c4e-kolla-config\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.351049 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5849b99d-1ca7-4258-b88a-704a89d46c4e-config-data\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.368844 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5849b99d-1ca7-4258-b88a-704a89d46c4e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.369208 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5849b99d-1ca7-4258-b88a-704a89d46c4e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.371691 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shsww\" (UniqueName: \"kubernetes.io/projected/5849b99d-1ca7-4258-b88a-704a89d46c4e-kube-api-access-shsww\") pod \"memcached-0\" (UID: \"5849b99d-1ca7-4258-b88a-704a89d46c4e\") " pod="openstack/memcached-0" Sep 29 21:40:17 crc kubenswrapper[4911]: I0929 21:40:17.549527 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 21:40:18 crc kubenswrapper[4911]: I0929 21:40:18.718629 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 21:40:18 crc kubenswrapper[4911]: I0929 21:40:18.720069 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 21:40:18 crc kubenswrapper[4911]: I0929 21:40:18.723429 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-nt6md" Sep 29 21:40:18 crc kubenswrapper[4911]: I0929 21:40:18.733982 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 21:40:18 crc kubenswrapper[4911]: I0929 21:40:18.775747 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw745\" (UniqueName: \"kubernetes.io/projected/3a5c8719-7840-4411-a449-81012851c24d-kube-api-access-gw745\") pod \"kube-state-metrics-0\" (UID: \"3a5c8719-7840-4411-a449-81012851c24d\") " pod="openstack/kube-state-metrics-0" Sep 29 21:40:18 crc kubenswrapper[4911]: I0929 21:40:18.877590 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw745\" (UniqueName: \"kubernetes.io/projected/3a5c8719-7840-4411-a449-81012851c24d-kube-api-access-gw745\") pod \"kube-state-metrics-0\" (UID: \"3a5c8719-7840-4411-a449-81012851c24d\") " pod="openstack/kube-state-metrics-0" Sep 29 21:40:18 crc kubenswrapper[4911]: I0929 21:40:18.918516 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw745\" (UniqueName: \"kubernetes.io/projected/3a5c8719-7840-4411-a449-81012851c24d-kube-api-access-gw745\") pod \"kube-state-metrics-0\" (UID: \"3a5c8719-7840-4411-a449-81012851c24d\") " pod="openstack/kube-state-metrics-0" Sep 29 21:40:19 crc kubenswrapper[4911]: I0929 21:40:19.039120 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.741335 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-nm8s4"] Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.742637 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.767190 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-5mztt" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.768180 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.769090 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.786832 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-nm8s4"] Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.799706 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-xhwxg"] Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.801260 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.809877 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-xhwxg"] Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.837234 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/755d3290-eae3-4e58-9870-63681ce460d5-var-run\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.837553 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/30d80d24-2072-4fa9-aa03-4448c693ec5f-var-lib\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.837718 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/755d3290-eae3-4e58-9870-63681ce460d5-var-run-ovn\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.837873 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30d80d24-2072-4fa9-aa03-4448c693ec5f-scripts\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.838001 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/755d3290-eae3-4e58-9870-63681ce460d5-scripts\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.838139 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/30d80d24-2072-4fa9-aa03-4448c693ec5f-var-run\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.838278 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdl65\" (UniqueName: \"kubernetes.io/projected/30d80d24-2072-4fa9-aa03-4448c693ec5f-kube-api-access-jdl65\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.838396 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2w4jm\" (UniqueName: \"kubernetes.io/projected/755d3290-eae3-4e58-9870-63681ce460d5-kube-api-access-2w4jm\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.838530 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/755d3290-eae3-4e58-9870-63681ce460d5-var-log-ovn\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.840347 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/755d3290-eae3-4e58-9870-63681ce460d5-combined-ca-bundle\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.840478 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/30d80d24-2072-4fa9-aa03-4448c693ec5f-var-log\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.840605 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/755d3290-eae3-4e58-9870-63681ce460d5-ovn-controller-tls-certs\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.840767 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/30d80d24-2072-4fa9-aa03-4448c693ec5f-etc-ovs\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941494 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2w4jm\" (UniqueName: \"kubernetes.io/projected/755d3290-eae3-4e58-9870-63681ce460d5-kube-api-access-2w4jm\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941562 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/755d3290-eae3-4e58-9870-63681ce460d5-var-log-ovn\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941626 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/755d3290-eae3-4e58-9870-63681ce460d5-combined-ca-bundle\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941652 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/30d80d24-2072-4fa9-aa03-4448c693ec5f-var-log\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941679 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/755d3290-eae3-4e58-9870-63681ce460d5-ovn-controller-tls-certs\") pod 
\"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941753 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/30d80d24-2072-4fa9-aa03-4448c693ec5f-etc-ovs\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941808 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/755d3290-eae3-4e58-9870-63681ce460d5-var-run\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941831 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/30d80d24-2072-4fa9-aa03-4448c693ec5f-var-lib\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941864 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/755d3290-eae3-4e58-9870-63681ce460d5-var-run-ovn\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941894 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30d80d24-2072-4fa9-aa03-4448c693ec5f-scripts\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941913 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/755d3290-eae3-4e58-9870-63681ce460d5-scripts\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941964 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/30d80d24-2072-4fa9-aa03-4448c693ec5f-var-run\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.941989 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdl65\" (UniqueName: \"kubernetes.io/projected/30d80d24-2072-4fa9-aa03-4448c693ec5f-kube-api-access-jdl65\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.942636 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/30d80d24-2072-4fa9-aa03-4448c693ec5f-var-lib\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.942822 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/755d3290-eae3-4e58-9870-63681ce460d5-var-run-ovn\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.942823 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/755d3290-eae3-4e58-9870-63681ce460d5-var-run\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.943101 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/755d3290-eae3-4e58-9870-63681ce460d5-var-log-ovn\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.945076 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30d80d24-2072-4fa9-aa03-4448c693ec5f-scripts\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.945139 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/30d80d24-2072-4fa9-aa03-4448c693ec5f-var-run\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.945826 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/30d80d24-2072-4fa9-aa03-4448c693ec5f-var-log\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.946013 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/30d80d24-2072-4fa9-aa03-4448c693ec5f-etc-ovs\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.948096 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/755d3290-eae3-4e58-9870-63681ce460d5-scripts\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.966550 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/755d3290-eae3-4e58-9870-63681ce460d5-combined-ca-bundle\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.966583 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/755d3290-eae3-4e58-9870-63681ce460d5-ovn-controller-tls-certs\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:22 crc kubenswrapper[4911]: I0929 21:40:22.977940 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdl65\" (UniqueName: \"kubernetes.io/projected/30d80d24-2072-4fa9-aa03-4448c693ec5f-kube-api-access-jdl65\") pod \"ovn-controller-ovs-xhwxg\" (UID: \"30d80d24-2072-4fa9-aa03-4448c693ec5f\") " pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.000713 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2w4jm\" (UniqueName: \"kubernetes.io/projected/755d3290-eae3-4e58-9870-63681ce460d5-kube-api-access-2w4jm\") pod \"ovn-controller-nm8s4\" (UID: \"755d3290-eae3-4e58-9870-63681ce460d5\") " pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.085179 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.121850 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.394010 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-dwtnp"] Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.612778 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.614473 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.619934 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.620579 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.621594 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.621782 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.621920 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-m64g7" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.627739 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.762185 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ccc0850-1d12-486d-bb28-2ebd69c456e0-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.762245 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7ccc0850-1d12-486d-bb28-2ebd69c456e0-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.762280 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7ccc0850-1d12-486d-bb28-2ebd69c456e0-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.762329 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj8g5\" (UniqueName: \"kubernetes.io/projected/7ccc0850-1d12-486d-bb28-2ebd69c456e0-kube-api-access-sj8g5\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.762389 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ccc0850-1d12-486d-bb28-2ebd69c456e0-config\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.762645 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ccc0850-1d12-486d-bb28-2ebd69c456e0-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.762693 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ccc0850-1d12-486d-bb28-2ebd69c456e0-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.762733 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.864837 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7ccc0850-1d12-486d-bb28-2ebd69c456e0-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.864907 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ccc0850-1d12-486d-bb28-2ebd69c456e0-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.864934 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ccc0850-1d12-486d-bb28-2ebd69c456e0-config\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.864956 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj8g5\" (UniqueName: \"kubernetes.io/projected/7ccc0850-1d12-486d-bb28-2ebd69c456e0-kube-api-access-sj8g5\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " 
pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.865065 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ccc0850-1d12-486d-bb28-2ebd69c456e0-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.865103 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ccc0850-1d12-486d-bb28-2ebd69c456e0-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.865147 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.865166 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ccc0850-1d12-486d-bb28-2ebd69c456e0-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.865278 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7ccc0850-1d12-486d-bb28-2ebd69c456e0-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.866261 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.866464 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ccc0850-1d12-486d-bb28-2ebd69c456e0-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.867086 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ccc0850-1d12-486d-bb28-2ebd69c456e0-config\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.871553 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ccc0850-1d12-486d-bb28-2ebd69c456e0-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.871577 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ccc0850-1d12-486d-bb28-2ebd69c456e0-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" 
(UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.871903 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ccc0850-1d12-486d-bb28-2ebd69c456e0-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.886174 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj8g5\" (UniqueName: \"kubernetes.io/projected/7ccc0850-1d12-486d-bb28-2ebd69c456e0-kube-api-access-sj8g5\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.917374 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"7ccc0850-1d12-486d-bb28-2ebd69c456e0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: I0929 21:40:23.945815 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:23 crc kubenswrapper[4911]: W0929 21:40:23.963373 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4dcfa4a_a27b_4b42_938d_d7509e1d2f20.slice/crio-34749579387cfbac2129c8fd8166378449ebeb11cecec8356514a4f0aded861e WatchSource:0}: Error finding container 34749579387cfbac2129c8fd8166378449ebeb11cecec8356514a4f0aded861e: Status 404 returned error can't find the container with id 34749579387cfbac2129c8fd8166378449ebeb11cecec8356514a4f0aded861e Sep 29 21:40:23 crc kubenswrapper[4911]: E0929 21:40:23.990658 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 21:40:23 crc kubenswrapper[4911]: E0929 21:40:23.990902 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n5glp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-4vjqh_openstack(9ba51b2f-ce7e-4d51-b30c-0917b93a2167): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 21:40:23 crc kubenswrapper[4911]: E0929 21:40:23.992398 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-4vjqh" podUID="9ba51b2f-ce7e-4d51-b30c-0917b93a2167" Sep 29 21:40:24 crc kubenswrapper[4911]: I0929 21:40:24.001622 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp" event={"ID":"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20","Type":"ContainerStarted","Data":"34749579387cfbac2129c8fd8166378449ebeb11cecec8356514a4f0aded861e"} Sep 29 21:40:24 crc kubenswrapper[4911]: E0929 21:40:24.006057 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 21:40:24 crc kubenswrapper[4911]: E0929 21:40:24.006265 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4g6kg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-2cxwm_openstack(f9c7e4f7-ea6f-4062-8215-f99cd085f5c0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 21:40:24 crc kubenswrapper[4911]: E0929 21:40:24.007709 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm" podUID="f9c7e4f7-ea6f-4062-8215-f99cd085f5c0" Sep 29 21:40:24 crc kubenswrapper[4911]: I0929 21:40:24.533991 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 21:40:24 crc kubenswrapper[4911]: I0929 21:40:24.539432 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 21:40:24 crc kubenswrapper[4911]: I0929 21:40:24.544319 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 21:40:24 crc kubenswrapper[4911]: I0929 21:40:24.548874 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 21:40:24 crc kubenswrapper[4911]: W0929 21:40:24.550154 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9fb504fe_401c_4b1d_af71_171d017883be.slice/crio-283041649eedb506c404cd6ae892fbd1b302c9a7b60df6b02f26c24fbe348651 WatchSource:0}: Error finding container 283041649eedb506c404cd6ae892fbd1b302c9a7b60df6b02f26c24fbe348651: Status 404 returned error can't find the container with id 283041649eedb506c404cd6ae892fbd1b302c9a7b60df6b02f26c24fbe348651 Sep 29 21:40:24 crc kubenswrapper[4911]: W0929 21:40:24.552193 4911 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf57577d_bb06_4339_a3e6_27a2cf733d17.slice/crio-8a153c800fec326546b5372c9b7f62a270a8c42b6f398d15b4039b53930cb0b7 WatchSource:0}: Error finding container 8a153c800fec326546b5372c9b7f62a270a8c42b6f398d15b4039b53930cb0b7: Status 404 returned error can't find the container with id 8a153c800fec326546b5372c9b7f62a270a8c42b6f398d15b4039b53930cb0b7 Sep 29 21:40:24 crc kubenswrapper[4911]: W0929 21:40:24.553396 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54bedb11_6943_4e34_a221_8dbd2cfd5eee.slice/crio-d44721ddf5f3aee40e36fd60b59bb1b90f8c2c9059f64492a96a44f8c9cabe5c WatchSource:0}: Error finding container d44721ddf5f3aee40e36fd60b59bb1b90f8c2c9059f64492a96a44f8c9cabe5c: Status 404 returned error can't find the container with id d44721ddf5f3aee40e36fd60b59bb1b90f8c2c9059f64492a96a44f8c9cabe5c Sep 29 21:40:24 crc kubenswrapper[4911]: W0929 21:40:24.558218 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16704d0f_ad69_4cc9_890a_77c268d78151.slice/crio-3f2069697d4e0595ede69000808f174f941bb76209878f649a8a465d8bdff7eb WatchSource:0}: Error finding container 3f2069697d4e0595ede69000808f174f941bb76209878f649a8a465d8bdff7eb: Status 404 returned error can't find the container with id 3f2069697d4e0595ede69000808f174f941bb76209878f649a8a465d8bdff7eb Sep 29 21:40:24 crc kubenswrapper[4911]: I0929 21:40:24.736921 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-xd7cs"] Sep 29 21:40:24 crc kubenswrapper[4911]: I0929 21:40:24.747738 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-nm8s4"] Sep 29 21:40:24 crc kubenswrapper[4911]: W0929 21:40:24.786656 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd532207a_cd4a_4a4d_885e_c9d15d4d0d8a.slice/crio-a6beebddb6bb8ff22ffbb0e818153f193e17d50d225dbffb3b2cd02d1ed7b256 WatchSource:0}: Error finding container a6beebddb6bb8ff22ffbb0e818153f193e17d50d225dbffb3b2cd02d1ed7b256: Status 404 returned error can't find the container with id a6beebddb6bb8ff22ffbb0e818153f193e17d50d225dbffb3b2cd02d1ed7b256 Sep 29 21:40:24 crc kubenswrapper[4911]: I0929 21:40:24.835551 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 21:40:24 crc kubenswrapper[4911]: W0929 21:40:24.846959 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ccc0850_1d12_486d_bb28_2ebd69c456e0.slice/crio-b8a727217c619678ef8bbac3257db5776ecf688950b287b067fd5bb33065277a WatchSource:0}: Error finding container b8a727217c619678ef8bbac3257db5776ecf688950b287b067fd5bb33065277a: Status 404 returned error can't find the container with id b8a727217c619678ef8bbac3257db5776ecf688950b287b067fd5bb33065277a Sep 29 21:40:24 crc kubenswrapper[4911]: W0929 21:40:24.945743 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30d80d24_2072_4fa9_aa03_4448c693ec5f.slice/crio-01e8bae8e9e1eb16814aa419f3f10169ca9ae128b6b588b79ae9424be447d7f5 WatchSource:0}: Error finding container 01e8bae8e9e1eb16814aa419f3f10169ca9ae128b6b588b79ae9424be447d7f5: Status 404 returned error can't find the container with id 
01e8bae8e9e1eb16814aa419f3f10169ca9ae128b6b588b79ae9424be447d7f5 Sep 29 21:40:24 crc kubenswrapper[4911]: I0929 21:40:24.961065 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-xhwxg"] Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.013109 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.038639 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-xd7cs" event={"ID":"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a","Type":"ContainerStarted","Data":"a6beebddb6bb8ff22ffbb0e818153f193e17d50d225dbffb3b2cd02d1ed7b256"} Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.040034 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-xhwxg" event={"ID":"30d80d24-2072-4fa9-aa03-4448c693ec5f","Type":"ContainerStarted","Data":"01e8bae8e9e1eb16814aa419f3f10169ca9ae128b6b588b79ae9424be447d7f5"} Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.078065 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nm8s4" event={"ID":"755d3290-eae3-4e58-9870-63681ce460d5","Type":"ContainerStarted","Data":"2c6037bfcadb8457fb4369361db36157754b291e020c2119482400a47d514171"} Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.089083 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.105587 4911 generic.go:334] "Generic (PLEG): container finished" podID="f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" containerID="be129b1955864f39a5eaf751b8ea4be12d97a9f73044a47c12f0a15125023174" exitCode=0 Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.105672 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp" event={"ID":"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20","Type":"ContainerDied","Data":"be129b1955864f39a5eaf751b8ea4be12d97a9f73044a47c12f0a15125023174"} Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.113225 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"df57577d-bb06-4339-a3e6-27a2cf733d17","Type":"ContainerStarted","Data":"8a153c800fec326546b5372c9b7f62a270a8c42b6f398d15b4039b53930cb0b7"} Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.119679 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"54bedb11-6943-4e34-a221-8dbd2cfd5eee","Type":"ContainerStarted","Data":"d44721ddf5f3aee40e36fd60b59bb1b90f8c2c9059f64492a96a44f8c9cabe5c"} Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.124120 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"7ccc0850-1d12-486d-bb28-2ebd69c456e0","Type":"ContainerStarted","Data":"b8a727217c619678ef8bbac3257db5776ecf688950b287b067fd5bb33065277a"} Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.132776 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"16704d0f-ad69-4cc9-890a-77c268d78151","Type":"ContainerStarted","Data":"3f2069697d4e0595ede69000808f174f941bb76209878f649a8a465d8bdff7eb"} Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.135293 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"5849b99d-1ca7-4258-b88a-704a89d46c4e","Type":"ContainerStarted","Data":"cdd8852738609bdba65a7160672c72341a503ce9d462948a162210c5a7c8b652"} 
Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.136443 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9fb504fe-401c-4b1d-af71-171d017883be","Type":"ContainerStarted","Data":"283041649eedb506c404cd6ae892fbd1b302c9a7b60df6b02f26c24fbe348651"} Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.212562 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.212946 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.258068 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-prkv9"] Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.259457 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.261653 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.274333 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-prkv9"] Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.401676 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b85ad830-8615-4f00-8d68-a2cb2b08dd68-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.401726 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/b85ad830-8615-4f00-8d68-a2cb2b08dd68-ovn-rundir\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.401775 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b85ad830-8615-4f00-8d68-a2cb2b08dd68-combined-ca-bundle\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.401824 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4rwl\" (UniqueName: \"kubernetes.io/projected/b85ad830-8615-4f00-8d68-a2cb2b08dd68-kube-api-access-p4rwl\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.401859 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/b85ad830-8615-4f00-8d68-a2cb2b08dd68-config\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.401887 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/b85ad830-8615-4f00-8d68-a2cb2b08dd68-ovs-rundir\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.503683 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/b85ad830-8615-4f00-8d68-a2cb2b08dd68-ovn-rundir\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.503774 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b85ad830-8615-4f00-8d68-a2cb2b08dd68-combined-ca-bundle\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.503817 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4rwl\" (UniqueName: \"kubernetes.io/projected/b85ad830-8615-4f00-8d68-a2cb2b08dd68-kube-api-access-p4rwl\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.503858 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b85ad830-8615-4f00-8d68-a2cb2b08dd68-config\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.503897 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/b85ad830-8615-4f00-8d68-a2cb2b08dd68-ovs-rundir\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.503957 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b85ad830-8615-4f00-8d68-a2cb2b08dd68-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.507350 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/b85ad830-8615-4f00-8d68-a2cb2b08dd68-ovn-rundir\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.507548 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: 
\"kubernetes.io/host-path/b85ad830-8615-4f00-8d68-a2cb2b08dd68-ovs-rundir\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.508448 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b85ad830-8615-4f00-8d68-a2cb2b08dd68-config\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.512427 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b85ad830-8615-4f00-8d68-a2cb2b08dd68-combined-ca-bundle\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.516518 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b85ad830-8615-4f00-8d68-a2cb2b08dd68-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.524171 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4rwl\" (UniqueName: \"kubernetes.io/projected/b85ad830-8615-4f00-8d68-a2cb2b08dd68-kube-api-access-p4rwl\") pod \"ovn-controller-metrics-prkv9\" (UID: \"b85ad830-8615-4f00-8d68-a2cb2b08dd68\") " pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.524621 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4vjqh" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.537064 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.590719 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-prkv9" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.654255 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-xd7cs"] Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.670457 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-2lf6d"] Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.679717 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.680338 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-2lf6d"] Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.689055 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.706135 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-config\") pod \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\" (UID: \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\") " Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.706205 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-dns-svc\") pod \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\" (UID: \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\") " Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.706235 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5glp\" (UniqueName: \"kubernetes.io/projected/9ba51b2f-ce7e-4d51-b30c-0917b93a2167-kube-api-access-n5glp\") pod \"9ba51b2f-ce7e-4d51-b30c-0917b93a2167\" (UID: \"9ba51b2f-ce7e-4d51-b30c-0917b93a2167\") " Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.706262 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4g6kg\" (UniqueName: \"kubernetes.io/projected/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-kube-api-access-4g6kg\") pod \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\" (UID: \"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0\") " Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.706311 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ba51b2f-ce7e-4d51-b30c-0917b93a2167-config\") pod \"9ba51b2f-ce7e-4d51-b30c-0917b93a2167\" (UID: \"9ba51b2f-ce7e-4d51-b30c-0917b93a2167\") " Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.707255 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f9c7e4f7-ea6f-4062-8215-f99cd085f5c0" (UID: "f9c7e4f7-ea6f-4062-8215-f99cd085f5c0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.707986 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ba51b2f-ce7e-4d51-b30c-0917b93a2167-config" (OuterVolumeSpecName: "config") pod "9ba51b2f-ce7e-4d51-b30c-0917b93a2167" (UID: "9ba51b2f-ce7e-4d51-b30c-0917b93a2167"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.708863 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-config" (OuterVolumeSpecName: "config") pod "f9c7e4f7-ea6f-4062-8215-f99cd085f5c0" (UID: "f9c7e4f7-ea6f-4062-8215-f99cd085f5c0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.719622 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ba51b2f-ce7e-4d51-b30c-0917b93a2167-kube-api-access-n5glp" (OuterVolumeSpecName: "kube-api-access-n5glp") pod "9ba51b2f-ce7e-4d51-b30c-0917b93a2167" (UID: "9ba51b2f-ce7e-4d51-b30c-0917b93a2167"). InnerVolumeSpecName "kube-api-access-n5glp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.729650 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-kube-api-access-4g6kg" (OuterVolumeSpecName: "kube-api-access-4g6kg") pod "f9c7e4f7-ea6f-4062-8215-f99cd085f5c0" (UID: "f9c7e4f7-ea6f-4062-8215-f99cd085f5c0"). InnerVolumeSpecName "kube-api-access-4g6kg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.808401 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-2lf6d\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.809548 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptqpj\" (UniqueName: \"kubernetes.io/projected/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-kube-api-access-ptqpj\") pod \"dnsmasq-dns-7fd796d7df-2lf6d\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.809612 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-config\") pod \"dnsmasq-dns-7fd796d7df-2lf6d\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.809639 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-2lf6d\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.809923 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.809968 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.809997 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5glp\" (UniqueName: \"kubernetes.io/projected/9ba51b2f-ce7e-4d51-b30c-0917b93a2167-kube-api-access-n5glp\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.810013 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4g6kg\" (UniqueName: 
\"kubernetes.io/projected/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0-kube-api-access-4g6kg\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.810028 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ba51b2f-ce7e-4d51-b30c-0917b93a2167-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.912064 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptqpj\" (UniqueName: \"kubernetes.io/projected/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-kube-api-access-ptqpj\") pod \"dnsmasq-dns-7fd796d7df-2lf6d\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.912114 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-config\") pod \"dnsmasq-dns-7fd796d7df-2lf6d\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.912132 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-2lf6d\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.912211 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-2lf6d\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.913020 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-config\") pod \"dnsmasq-dns-7fd796d7df-2lf6d\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.913350 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-2lf6d\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.914566 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-2lf6d\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:25 crc kubenswrapper[4911]: I0929 21:40:25.929679 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptqpj\" (UniqueName: \"kubernetes.io/projected/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-kube-api-access-ptqpj\") pod \"dnsmasq-dns-7fd796d7df-2lf6d\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.029994 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.082916 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-prkv9"] Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.150480 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm" event={"ID":"f9c7e4f7-ea6f-4062-8215-f99cd085f5c0","Type":"ContainerDied","Data":"2229db9d4e8e4ea42e174854ff9108b93820bc0c53cbf1f9801078a175c1ab00"} Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.150563 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-2cxwm" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.153946 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3a5c8719-7840-4411-a449-81012851c24d","Type":"ContainerStarted","Data":"bfbbd588dfd0dd90a372f2d6f74501e4d6b9cae5079a2d8f83d90d044a04ead9"} Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.161010 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp" event={"ID":"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20","Type":"ContainerStarted","Data":"a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de"} Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.163214 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.171220 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-4vjqh" event={"ID":"9ba51b2f-ce7e-4d51-b30c-0917b93a2167","Type":"ContainerDied","Data":"b25560403965ce5bf6cfff6505d90f95d03ea7c65e15f9c5c2f5916ee008bb93"} Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.171348 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4vjqh" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.180069 4911 generic.go:334] "Generic (PLEG): container finished" podID="d532207a-cd4a-4a4d-885e-c9d15d4d0d8a" containerID="86013188e5dcac523ee6dd34b3ea1c662cb01efbd5c3d02f87a3050774d9c44a" exitCode=0 Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.180127 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-xd7cs" event={"ID":"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a","Type":"ContainerDied","Data":"86013188e5dcac523ee6dd34b3ea1c662cb01efbd5c3d02f87a3050774d9c44a"} Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.184582 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp" podStartSLOduration=12.65561288 podStartE2EDuration="13.184569605s" podCreationTimestamp="2025-09-29 21:40:13 +0000 UTC" firstStartedPulling="2025-09-29 21:40:23.974205243 +0000 UTC m=+901.951317924" lastFinishedPulling="2025-09-29 21:40:24.503161978 +0000 UTC m=+902.480274649" observedRunningTime="2025-09-29 21:40:26.180839509 +0000 UTC m=+904.157952190" watchObservedRunningTime="2025-09-29 21:40:26.184569605 +0000 UTC m=+904.161682276" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.237592 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-2cxwm"] Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.252421 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-2cxwm"] Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.281395 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4vjqh"] Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.292733 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4vjqh"] Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.396334 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.400590 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.405211 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.405466 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-t4c6h" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.406060 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.406727 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.416113 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.533883 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ad46165-7cec-489f-a199-71ed3a5f1c44-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.533935 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ad46165-7cec-489f-a199-71ed3a5f1c44-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.533992 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ad46165-7cec-489f-a199-71ed3a5f1c44-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.534545 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ad46165-7cec-489f-a199-71ed3a5f1c44-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.534575 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp7zn\" (UniqueName: \"kubernetes.io/projected/3ad46165-7cec-489f-a199-71ed3a5f1c44-kube-api-access-rp7zn\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.534653 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ad46165-7cec-489f-a199-71ed3a5f1c44-config\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.534695 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3ad46165-7cec-489f-a199-71ed3a5f1c44-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: 
\"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.534718 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.636859 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3ad46165-7cec-489f-a199-71ed3a5f1c44-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.636912 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.636968 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ad46165-7cec-489f-a199-71ed3a5f1c44-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.637025 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ad46165-7cec-489f-a199-71ed3a5f1c44-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.637088 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ad46165-7cec-489f-a199-71ed3a5f1c44-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.637145 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ad46165-7cec-489f-a199-71ed3a5f1c44-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.637186 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp7zn\" (UniqueName: \"kubernetes.io/projected/3ad46165-7cec-489f-a199-71ed3a5f1c44-kube-api-access-rp7zn\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.637203 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ad46165-7cec-489f-a199-71ed3a5f1c44-config\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.638636 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/3ad46165-7cec-489f-a199-71ed3a5f1c44-config\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.639183 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3ad46165-7cec-489f-a199-71ed3a5f1c44-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.639207 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.640149 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ad46165-7cec-489f-a199-71ed3a5f1c44-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.646844 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ad46165-7cec-489f-a199-71ed3a5f1c44-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.652042 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ad46165-7cec-489f-a199-71ed3a5f1c44-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.654413 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp7zn\" (UniqueName: \"kubernetes.io/projected/3ad46165-7cec-489f-a199-71ed3a5f1c44-kube-api-access-rp7zn\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.665046 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.670748 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ad46165-7cec-489f-a199-71ed3a5f1c44-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3ad46165-7cec-489f-a199-71ed3a5f1c44\") " pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.724025 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ba51b2f-ce7e-4d51-b30c-0917b93a2167" path="/var/lib/kubelet/pods/9ba51b2f-ce7e-4d51-b30c-0917b93a2167/volumes" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.724443 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9c7e4f7-ea6f-4062-8215-f99cd085f5c0" 
path="/var/lib/kubelet/pods/f9c7e4f7-ea6f-4062-8215-f99cd085f5c0/volumes" Sep 29 21:40:26 crc kubenswrapper[4911]: I0929 21:40:26.747433 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:27 crc kubenswrapper[4911]: W0929 21:40:27.911184 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb85ad830_8615_4f00_8d68_a2cb2b08dd68.slice/crio-993de87f452c9049ab3c25883215efb3fbc14c8f70f9f11f9b1f0d22b37970b0 WatchSource:0}: Error finding container 993de87f452c9049ab3c25883215efb3fbc14c8f70f9f11f9b1f0d22b37970b0: Status 404 returned error can't find the container with id 993de87f452c9049ab3c25883215efb3fbc14c8f70f9f11f9b1f0d22b37970b0 Sep 29 21:40:28 crc kubenswrapper[4911]: I0929 21:40:28.202253 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-prkv9" event={"ID":"b85ad830-8615-4f00-8d68-a2cb2b08dd68","Type":"ContainerStarted","Data":"993de87f452c9049ab3c25883215efb3fbc14c8f70f9f11f9b1f0d22b37970b0"} Sep 29 21:40:28 crc kubenswrapper[4911]: I0929 21:40:28.383155 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-2lf6d"] Sep 29 21:40:33 crc kubenswrapper[4911]: I0929 21:40:33.240705 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" event={"ID":"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9","Type":"ContainerStarted","Data":"044f8b41c351c3104cabc2d771e820518b107f5ce7dd35d6662c9f62b87206f2"} Sep 29 21:40:33 crc kubenswrapper[4911]: I0929 21:40:33.401346 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp" Sep 29 21:40:34 crc kubenswrapper[4911]: I0929 21:40:34.309624 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 21:40:34 crc kubenswrapper[4911]: W0929 21:40:34.399349 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ad46165_7cec_489f_a199_71ed3a5f1c44.slice/crio-53a0a9b240f75610e9c82ba90b97812bb95a4f85e79327b62ef996f0767c7a55 WatchSource:0}: Error finding container 53a0a9b240f75610e9c82ba90b97812bb95a4f85e79327b62ef996f0767c7a55: Status 404 returned error can't find the container with id 53a0a9b240f75610e9c82ba90b97812bb95a4f85e79327b62ef996f0767c7a55 Sep 29 21:40:35 crc kubenswrapper[4911]: I0929 21:40:35.264043 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-xd7cs" event={"ID":"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a","Type":"ContainerStarted","Data":"10ae117c50b260405b6840fa61ffc1545eef246ca5e07763796f1be8d1ae8ff8"} Sep 29 21:40:35 crc kubenswrapper[4911]: I0929 21:40:35.264724 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-xd7cs" podUID="d532207a-cd4a-4a4d-885e-c9d15d4d0d8a" containerName="dnsmasq-dns" containerID="cri-o://10ae117c50b260405b6840fa61ffc1545eef246ca5e07763796f1be8d1ae8ff8" gracePeriod=10 Sep 29 21:40:35 crc kubenswrapper[4911]: I0929 21:40:35.265078 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-xd7cs" Sep 29 21:40:35 crc kubenswrapper[4911]: I0929 21:40:35.267103 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"3ad46165-7cec-489f-a199-71ed3a5f1c44","Type":"ContainerStarted","Data":"53a0a9b240f75610e9c82ba90b97812bb95a4f85e79327b62ef996f0767c7a55"} Sep 29 21:40:35 crc kubenswrapper[4911]: I0929 21:40:35.269335 4911 generic.go:334] "Generic (PLEG): container finished" podID="6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" containerID="db974202c9c3560fbaccf9d193e431266b866d0b69cb77530a5c7fb424f1391e" exitCode=0 Sep 29 21:40:35 crc kubenswrapper[4911]: I0929 21:40:35.269430 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" event={"ID":"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9","Type":"ContainerDied","Data":"db974202c9c3560fbaccf9d193e431266b866d0b69cb77530a5c7fb424f1391e"} Sep 29 21:40:35 crc kubenswrapper[4911]: I0929 21:40:35.294259 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-xd7cs" podStartSLOduration=23.294235965 podStartE2EDuration="23.294235965s" podCreationTimestamp="2025-09-29 21:40:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:40:35.287132465 +0000 UTC m=+913.264245136" watchObservedRunningTime="2025-09-29 21:40:35.294235965 +0000 UTC m=+913.271348636" Sep 29 21:40:36 crc kubenswrapper[4911]: I0929 21:40:36.279658 4911 generic.go:334] "Generic (PLEG): container finished" podID="d532207a-cd4a-4a4d-885e-c9d15d4d0d8a" containerID="10ae117c50b260405b6840fa61ffc1545eef246ca5e07763796f1be8d1ae8ff8" exitCode=0 Sep 29 21:40:36 crc kubenswrapper[4911]: I0929 21:40:36.279712 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-xd7cs" event={"ID":"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a","Type":"ContainerDied","Data":"10ae117c50b260405b6840fa61ffc1545eef246ca5e07763796f1be8d1ae8ff8"} Sep 29 21:40:36 crc kubenswrapper[4911]: I0929 21:40:36.490768 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-xd7cs" Sep 29 21:40:36 crc kubenswrapper[4911]: I0929 21:40:36.636830 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jj5s\" (UniqueName: \"kubernetes.io/projected/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-kube-api-access-2jj5s\") pod \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\" (UID: \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\") " Sep 29 21:40:36 crc kubenswrapper[4911]: I0929 21:40:36.637021 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-config\") pod \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\" (UID: \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\") " Sep 29 21:40:36 crc kubenswrapper[4911]: I0929 21:40:36.637067 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-dns-svc\") pod \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\" (UID: \"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a\") " Sep 29 21:40:36 crc kubenswrapper[4911]: I0929 21:40:36.642369 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-kube-api-access-2jj5s" (OuterVolumeSpecName: "kube-api-access-2jj5s") pod "d532207a-cd4a-4a4d-885e-c9d15d4d0d8a" (UID: "d532207a-cd4a-4a4d-885e-c9d15d4d0d8a"). InnerVolumeSpecName "kube-api-access-2jj5s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:40:36 crc kubenswrapper[4911]: I0929 21:40:36.678967 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-config" (OuterVolumeSpecName: "config") pod "d532207a-cd4a-4a4d-885e-c9d15d4d0d8a" (UID: "d532207a-cd4a-4a4d-885e-c9d15d4d0d8a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:40:36 crc kubenswrapper[4911]: I0929 21:40:36.682321 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d532207a-cd4a-4a4d-885e-c9d15d4d0d8a" (UID: "d532207a-cd4a-4a4d-885e-c9d15d4d0d8a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:40:36 crc kubenswrapper[4911]: I0929 21:40:36.739561 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:36 crc kubenswrapper[4911]: I0929 21:40:36.739609 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:36 crc kubenswrapper[4911]: I0929 21:40:36.739623 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jj5s\" (UniqueName: \"kubernetes.io/projected/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a-kube-api-access-2jj5s\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.287830 4911 generic.go:334] "Generic (PLEG): container finished" podID="30d80d24-2072-4fa9-aa03-4448c693ec5f" containerID="548f341215db50333f79531886ec8659209295cacc925834494beefd9114388c" exitCode=0 Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.289245 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-xhwxg" event={"ID":"30d80d24-2072-4fa9-aa03-4448c693ec5f","Type":"ContainerDied","Data":"548f341215db50333f79531886ec8659209295cacc925834494beefd9114388c"} Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.293512 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"16704d0f-ad69-4cc9-890a-77c268d78151","Type":"ContainerStarted","Data":"337a146ff0ca619afb1efb1546eb8d6c2739a71f7a38410dc78114bda5bfa1ee"} Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.296066 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"5849b99d-1ca7-4258-b88a-704a89d46c4e","Type":"ContainerStarted","Data":"b97dcfc96ad8889ebcc3e011b32ba13c97a7b6a41ae8fbeba829c3d6c8ac9e99"} Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.296884 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.299930 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9fb504fe-401c-4b1d-af71-171d017883be","Type":"ContainerStarted","Data":"f0a4291b7cebf5d29109ab1fb3fbbf3a7a97b3d7d77f4d1f16f51110934bfed0"} Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.303674 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" 
event={"ID":"3a5c8719-7840-4411-a449-81012851c24d","Type":"ContainerStarted","Data":"dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61"} Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.303867 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.307064 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" event={"ID":"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9","Type":"ContainerStarted","Data":"bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192"} Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.307224 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.308769 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"54bedb11-6943-4e34-a221-8dbd2cfd5eee","Type":"ContainerStarted","Data":"6332677c2f853df112183ac5da0e7e95e8fdbc8790b02d3fe1434e560e32033d"} Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.319148 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-xd7cs" event={"ID":"d532207a-cd4a-4a4d-885e-c9d15d4d0d8a","Type":"ContainerDied","Data":"a6beebddb6bb8ff22ffbb0e818153f193e17d50d225dbffb3b2cd02d1ed7b256"} Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.319198 4911 scope.go:117] "RemoveContainer" containerID="10ae117c50b260405b6840fa61ffc1545eef246ca5e07763796f1be8d1ae8ff8" Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.319350 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-xd7cs" Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.356895 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=7.57364681 podStartE2EDuration="19.356879082s" podCreationTimestamp="2025-09-29 21:40:18 +0000 UTC" firstStartedPulling="2025-09-29 21:40:25.097007092 +0000 UTC m=+903.074119763" lastFinishedPulling="2025-09-29 21:40:36.880239364 +0000 UTC m=+914.857352035" observedRunningTime="2025-09-29 21:40:37.356864371 +0000 UTC m=+915.333977052" watchObservedRunningTime="2025-09-29 21:40:37.356879082 +0000 UTC m=+915.333991763" Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.358869 4911 scope.go:117] "RemoveContainer" containerID="86013188e5dcac523ee6dd34b3ea1c662cb01efbd5c3d02f87a3050774d9c44a" Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.382948 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" podStartSLOduration=12.382926482 podStartE2EDuration="12.382926482s" podCreationTimestamp="2025-09-29 21:40:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:40:37.378395351 +0000 UTC m=+915.355508022" watchObservedRunningTime="2025-09-29 21:40:37.382926482 +0000 UTC m=+915.360039153" Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.468968 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=11.679889243 podStartE2EDuration="20.468951449s" podCreationTimestamp="2025-09-29 21:40:17 +0000 UTC" firstStartedPulling="2025-09-29 21:40:25.019952446 +0000 UTC m=+902.997065117" 
lastFinishedPulling="2025-09-29 21:40:33.809014642 +0000 UTC m=+911.786127323" observedRunningTime="2025-09-29 21:40:37.465886663 +0000 UTC m=+915.442999334" watchObservedRunningTime="2025-09-29 21:40:37.468951449 +0000 UTC m=+915.446064130" Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.492966 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-xd7cs"] Sep 29 21:40:37 crc kubenswrapper[4911]: I0929 21:40:37.501064 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-xd7cs"] Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.332397 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"df57577d-bb06-4339-a3e6-27a2cf733d17","Type":"ContainerStarted","Data":"2bf107bfb438ad14f6a36882df2aafdc13d5c6cee07bd61a604c6ac73512a9ce"} Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.337006 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"7ccc0850-1d12-486d-bb28-2ebd69c456e0","Type":"ContainerStarted","Data":"337b305de619f1c21607dabafa30b35f6deb9f9726689ead969da9da7ca6fc9e"} Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.337084 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"7ccc0850-1d12-486d-bb28-2ebd69c456e0","Type":"ContainerStarted","Data":"00de58f43b2cfa545ea1178ee4cc91cc0bfd69aa4c146ef57b22432666022752"} Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.341478 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-xhwxg" event={"ID":"30d80d24-2072-4fa9-aa03-4448c693ec5f","Type":"ContainerStarted","Data":"19c3c4059093dd9723abb5d66dc268d2add7409bb01dea9e33475f0f254d7f23"} Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.341527 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-xhwxg" event={"ID":"30d80d24-2072-4fa9-aa03-4448c693ec5f","Type":"ContainerStarted","Data":"fc662e4b760fbb101d6b22b925e07caf92b97f34d20faeddcfd8e1d69f27d0f3"} Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.341651 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.344495 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"3ad46165-7cec-489f-a199-71ed3a5f1c44","Type":"ContainerStarted","Data":"a831130b3e0e3cb4864f1dfa3460009b50edd852a0420f90fbbc4c1213936002"} Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.344559 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"3ad46165-7cec-489f-a199-71ed3a5f1c44","Type":"ContainerStarted","Data":"0fa9827423a27f805acbf0bd7ba527793fb184ced5fcccf566886c3625e18814"} Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.347680 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nm8s4" event={"ID":"755d3290-eae3-4e58-9870-63681ce460d5","Type":"ContainerStarted","Data":"77bb549434026d9e2349439ab1c47c227f818f0b860ad7b6139919b1e750a75b"} Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.347910 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-nm8s4" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.350652 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-prkv9" 
event={"ID":"b85ad830-8615-4f00-8d68-a2cb2b08dd68","Type":"ContainerStarted","Data":"3d58cdc395dea42d29a7726e06c44cbe326e1772511270d3d7776f1c891be159"} Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.385708 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=10.907499463 podStartE2EDuration="13.385652875s" podCreationTimestamp="2025-09-29 21:40:25 +0000 UTC" firstStartedPulling="2025-09-29 21:40:34.40265975 +0000 UTC m=+912.379772421" lastFinishedPulling="2025-09-29 21:40:36.880813142 +0000 UTC m=+914.857925833" observedRunningTime="2025-09-29 21:40:38.383870631 +0000 UTC m=+916.360983332" watchObservedRunningTime="2025-09-29 21:40:38.385652875 +0000 UTC m=+916.362765606" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.417617 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=7.312044456 podStartE2EDuration="16.417594129s" podCreationTimestamp="2025-09-29 21:40:22 +0000 UTC" firstStartedPulling="2025-09-29 21:40:24.849280436 +0000 UTC m=+902.826393107" lastFinishedPulling="2025-09-29 21:40:33.954830109 +0000 UTC m=+911.931942780" observedRunningTime="2025-09-29 21:40:38.414812743 +0000 UTC m=+916.391925454" watchObservedRunningTime="2025-09-29 21:40:38.417594129 +0000 UTC m=+916.394706830" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.455043 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-prkv9" podStartSLOduration=4.506960281 podStartE2EDuration="13.455020024s" podCreationTimestamp="2025-09-29 21:40:25 +0000 UTC" firstStartedPulling="2025-09-29 21:40:27.914329766 +0000 UTC m=+905.891442477" lastFinishedPulling="2025-09-29 21:40:36.862389509 +0000 UTC m=+914.839502220" observedRunningTime="2025-09-29 21:40:38.443825145 +0000 UTC m=+916.420937836" watchObservedRunningTime="2025-09-29 21:40:38.455020024 +0000 UTC m=+916.432132735" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.489963 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-nm8s4" podStartSLOduration=7.313974538 podStartE2EDuration="16.4899387s" podCreationTimestamp="2025-09-29 21:40:22 +0000 UTC" firstStartedPulling="2025-09-29 21:40:24.785499992 +0000 UTC m=+902.762612663" lastFinishedPulling="2025-09-29 21:40:33.961464154 +0000 UTC m=+911.938576825" observedRunningTime="2025-09-29 21:40:38.483853931 +0000 UTC m=+916.460966612" watchObservedRunningTime="2025-09-29 21:40:38.4899387 +0000 UTC m=+916.467051371" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.512506 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-xhwxg" podStartSLOduration=7.678701653 podStartE2EDuration="16.512487021s" podCreationTimestamp="2025-09-29 21:40:22 +0000 UTC" firstStartedPulling="2025-09-29 21:40:24.969477245 +0000 UTC m=+902.946589916" lastFinishedPulling="2025-09-29 21:40:33.803262593 +0000 UTC m=+911.780375284" observedRunningTime="2025-09-29 21:40:38.511716068 +0000 UTC m=+916.488828749" watchObservedRunningTime="2025-09-29 21:40:38.512487021 +0000 UTC m=+916.489599722" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.709612 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d532207a-cd4a-4a4d-885e-c9d15d4d0d8a" path="/var/lib/kubelet/pods/d532207a-cd4a-4a4d-885e-c9d15d4d0d8a/volumes" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.747864 4911 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.819183 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-2lf6d"] Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.843133 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-gptbl"] Sep 29 21:40:38 crc kubenswrapper[4911]: E0929 21:40:38.843608 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d532207a-cd4a-4a4d-885e-c9d15d4d0d8a" containerName="init" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.843702 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d532207a-cd4a-4a4d-885e-c9d15d4d0d8a" containerName="init" Sep 29 21:40:38 crc kubenswrapper[4911]: E0929 21:40:38.843804 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d532207a-cd4a-4a4d-885e-c9d15d4d0d8a" containerName="dnsmasq-dns" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.843857 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d532207a-cd4a-4a4d-885e-c9d15d4d0d8a" containerName="dnsmasq-dns" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.844064 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d532207a-cd4a-4a4d-885e-c9d15d4d0d8a" containerName="dnsmasq-dns" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.844894 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.848834 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.855695 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-gptbl"] Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.889898 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.889966 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-config\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.890046 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw4dk\" (UniqueName: \"kubernetes.io/projected/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-kube-api-access-dw4dk\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.890082 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc 
kubenswrapper[4911]: I0929 21:40:38.890162 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.946749 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.946805 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.991461 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.991535 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.991579 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.991605 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-config\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.991662 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dw4dk\" (UniqueName: \"kubernetes.io/projected/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-kube-api-access-dw4dk\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.992504 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.992534 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.992783 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:38 crc kubenswrapper[4911]: I0929 21:40:38.993083 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-config\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.009045 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dw4dk\" (UniqueName: \"kubernetes.io/projected/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-kube-api-access-dw4dk\") pod \"dnsmasq-dns-86db49b7ff-gptbl\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.198872 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.360173 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-xhwxg" Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.360622 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" podUID="6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" containerName="dnsmasq-dns" containerID="cri-o://bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192" gracePeriod=10 Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.706468 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-gptbl"] Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.775323 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.802750 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-ovsdbserver-nb\") pod \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.802811 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-dns-svc\") pod \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.802915 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptqpj\" (UniqueName: \"kubernetes.io/projected/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-kube-api-access-ptqpj\") pod \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.802967 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-config\") pod \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\" (UID: \"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9\") " Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.830246 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-kube-api-access-ptqpj" (OuterVolumeSpecName: "kube-api-access-ptqpj") pod "6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" (UID: "6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9"). InnerVolumeSpecName "kube-api-access-ptqpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.856113 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" (UID: "6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.867445 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-config" (OuterVolumeSpecName: "config") pod "6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" (UID: "6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.872049 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" (UID: "6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.904970 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.904997 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.905007 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptqpj\" (UniqueName: \"kubernetes.io/projected/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-kube-api-access-ptqpj\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:39 crc kubenswrapper[4911]: I0929 21:40:39.905020 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.382352 4911 generic.go:334] "Generic (PLEG): container finished" podID="6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" containerID="bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192" exitCode=0 Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.382424 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" event={"ID":"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9","Type":"ContainerDied","Data":"bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192"} Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.382451 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" event={"ID":"6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9","Type":"ContainerDied","Data":"044f8b41c351c3104cabc2d771e820518b107f5ce7dd35d6662c9f62b87206f2"} Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.382484 4911 scope.go:117] "RemoveContainer" containerID="bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192" Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.382517 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-2lf6d" Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.387036 4911 generic.go:334] "Generic (PLEG): container finished" podID="a820cb69-2ee4-4fd1-a51e-9852bd1151fe" containerID="b1f5f0a5bdc209617a41b00b86ca8c73b42b4ca239520bcf0aa75d5056f9f52b" exitCode=0 Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.387161 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" event={"ID":"a820cb69-2ee4-4fd1-a51e-9852bd1151fe","Type":"ContainerDied","Data":"b1f5f0a5bdc209617a41b00b86ca8c73b42b4ca239520bcf0aa75d5056f9f52b"} Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.387949 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" event={"ID":"a820cb69-2ee4-4fd1-a51e-9852bd1151fe","Type":"ContainerStarted","Data":"1b5798274458c7828d874a28f82cca2ecb168bfebf023920ec03617f926b263b"} Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.433028 4911 scope.go:117] "RemoveContainer" containerID="db974202c9c3560fbaccf9d193e431266b866d0b69cb77530a5c7fb424f1391e" Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.570217 4911 scope.go:117] "RemoveContainer" containerID="bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192" Sep 29 21:40:40 crc kubenswrapper[4911]: E0929 21:40:40.570670 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192\": container with ID starting with bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192 not found: ID does not exist" containerID="bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192" Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.570698 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192"} err="failed to get container status \"bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192\": rpc error: code = NotFound desc = could not find container \"bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192\": container with ID starting with bcc5a9648d2f37a7659fe7b75cbc5583fb061017b3f1be289abbeea35e80e192 not found: ID does not exist" Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.570717 4911 scope.go:117] "RemoveContainer" containerID="db974202c9c3560fbaccf9d193e431266b866d0b69cb77530a5c7fb424f1391e" Sep 29 21:40:40 crc kubenswrapper[4911]: E0929 21:40:40.570945 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db974202c9c3560fbaccf9d193e431266b866d0b69cb77530a5c7fb424f1391e\": container with ID starting with db974202c9c3560fbaccf9d193e431266b866d0b69cb77530a5c7fb424f1391e not found: ID does not exist" containerID="db974202c9c3560fbaccf9d193e431266b866d0b69cb77530a5c7fb424f1391e" Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.570961 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db974202c9c3560fbaccf9d193e431266b866d0b69cb77530a5c7fb424f1391e"} err="failed to get container status \"db974202c9c3560fbaccf9d193e431266b866d0b69cb77530a5c7fb424f1391e\": rpc error: code = NotFound desc = could not find container \"db974202c9c3560fbaccf9d193e431266b866d0b69cb77530a5c7fb424f1391e\": container with ID starting with 
db974202c9c3560fbaccf9d193e431266b866d0b69cb77530a5c7fb424f1391e not found: ID does not exist" Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.629530 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-2lf6d"] Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.635175 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-2lf6d"] Sep 29 21:40:40 crc kubenswrapper[4911]: I0929 21:40:40.711566 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" path="/var/lib/kubelet/pods/6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9/volumes" Sep 29 21:40:41 crc kubenswrapper[4911]: I0929 21:40:41.398934 4911 generic.go:334] "Generic (PLEG): container finished" podID="df57577d-bb06-4339-a3e6-27a2cf733d17" containerID="2bf107bfb438ad14f6a36882df2aafdc13d5c6cee07bd61a604c6ac73512a9ce" exitCode=0 Sep 29 21:40:41 crc kubenswrapper[4911]: I0929 21:40:41.399068 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"df57577d-bb06-4339-a3e6-27a2cf733d17","Type":"ContainerDied","Data":"2bf107bfb438ad14f6a36882df2aafdc13d5c6cee07bd61a604c6ac73512a9ce"} Sep 29 21:40:41 crc kubenswrapper[4911]: I0929 21:40:41.405163 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" event={"ID":"a820cb69-2ee4-4fd1-a51e-9852bd1151fe","Type":"ContainerStarted","Data":"2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f"} Sep 29 21:40:41 crc kubenswrapper[4911]: I0929 21:40:41.405303 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:41 crc kubenswrapper[4911]: I0929 21:40:41.409483 4911 generic.go:334] "Generic (PLEG): container finished" podID="9fb504fe-401c-4b1d-af71-171d017883be" containerID="f0a4291b7cebf5d29109ab1fb3fbbf3a7a97b3d7d77f4d1f16f51110934bfed0" exitCode=0 Sep 29 21:40:41 crc kubenswrapper[4911]: I0929 21:40:41.409533 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9fb504fe-401c-4b1d-af71-171d017883be","Type":"ContainerDied","Data":"f0a4291b7cebf5d29109ab1fb3fbbf3a7a97b3d7d77f4d1f16f51110934bfed0"} Sep 29 21:40:41 crc kubenswrapper[4911]: I0929 21:40:41.483216 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" podStartSLOduration=3.483172056 podStartE2EDuration="3.483172056s" podCreationTimestamp="2025-09-29 21:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:40:41.464760083 +0000 UTC m=+919.441872794" watchObservedRunningTime="2025-09-29 21:40:41.483172056 +0000 UTC m=+919.460284747" Sep 29 21:40:41 crc kubenswrapper[4911]: I0929 21:40:41.748116 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:41 crc kubenswrapper[4911]: I0929 21:40:41.799437 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:41 crc kubenswrapper[4911]: I0929 21:40:41.998475 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:42 crc kubenswrapper[4911]: I0929 21:40:42.422863 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" 
event={"ID":"9fb504fe-401c-4b1d-af71-171d017883be","Type":"ContainerStarted","Data":"0d4e3ef2da86c0027e39a23c088f88bf42990cebe73700efb048f5792affc06e"} Sep 29 21:40:42 crc kubenswrapper[4911]: I0929 21:40:42.427165 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"df57577d-bb06-4339-a3e6-27a2cf733d17","Type":"ContainerStarted","Data":"5716bcaebf583b31bb06c2c692009a03c593fdd7c68df1cfe152e5f5136aa998"} Sep 29 21:40:42 crc kubenswrapper[4911]: I0929 21:40:42.461136 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=18.059481965 podStartE2EDuration="27.461113038s" podCreationTimestamp="2025-09-29 21:40:15 +0000 UTC" firstStartedPulling="2025-09-29 21:40:24.552260126 +0000 UTC m=+902.529372797" lastFinishedPulling="2025-09-29 21:40:33.953891199 +0000 UTC m=+911.931003870" observedRunningTime="2025-09-29 21:40:42.452877062 +0000 UTC m=+920.429989813" watchObservedRunningTime="2025-09-29 21:40:42.461113038 +0000 UTC m=+920.438225729" Sep 29 21:40:42 crc kubenswrapper[4911]: I0929 21:40:42.501320 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=17.939447751 podStartE2EDuration="27.501291848s" podCreationTimestamp="2025-09-29 21:40:15 +0000 UTC" firstStartedPulling="2025-09-29 21:40:24.555503867 +0000 UTC m=+902.532616538" lastFinishedPulling="2025-09-29 21:40:34.117347964 +0000 UTC m=+912.094460635" observedRunningTime="2025-09-29 21:40:42.486023533 +0000 UTC m=+920.463136274" watchObservedRunningTime="2025-09-29 21:40:42.501291848 +0000 UTC m=+920.478404559" Sep 29 21:40:42 crc kubenswrapper[4911]: I0929 21:40:42.507548 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 29 21:40:42 crc kubenswrapper[4911]: I0929 21:40:42.552042 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.016387 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.256746 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 29 21:40:44 crc kubenswrapper[4911]: E0929 21:40:44.257630 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" containerName="dnsmasq-dns" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.257662 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" containerName="dnsmasq-dns" Sep 29 21:40:44 crc kubenswrapper[4911]: E0929 21:40:44.257703 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" containerName="init" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.257716 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" containerName="init" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.258093 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ae2c7f1-3c57-4aa1-8d29-f56ab6f2dce9" containerName="dnsmasq-dns" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.259421 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.262121 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.262176 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.266781 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-65tkp" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.266879 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.270728 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.295306 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.295558 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dz5g\" (UniqueName: \"kubernetes.io/projected/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-kube-api-access-5dz5g\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.295706 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-config\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.295859 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.296976 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-scripts\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.297062 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.297242 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: 
I0929 21:40:44.398750 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dz5g\" (UniqueName: \"kubernetes.io/projected/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-kube-api-access-5dz5g\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.398839 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-config\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.398869 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.399694 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-config\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.400289 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-scripts\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.399784 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-scripts\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.400364 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.400654 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.400400 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.401951 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.405957 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.406397 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.411621 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.416439 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dz5g\" (UniqueName: \"kubernetes.io/projected/7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2-kube-api-access-5dz5g\") pod \"ovn-northd-0\" (UID: \"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2\") " pod="openstack/ovn-northd-0" Sep 29 21:40:44 crc kubenswrapper[4911]: I0929 21:40:44.578415 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 21:40:45 crc kubenswrapper[4911]: I0929 21:40:45.040534 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 21:40:45 crc kubenswrapper[4911]: I0929 21:40:45.455524 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2","Type":"ContainerStarted","Data":"8648b2223e37c904f4373bdc88e2567b2ec4619b2c2fd34318c4ac030d1b475e"} Sep 29 21:40:46 crc kubenswrapper[4911]: I0929 21:40:46.466783 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2","Type":"ContainerStarted","Data":"39efe656d7d93a56c473aa1abcd253d71c4aa55007da02ad5e1b82f5a6da2357"} Sep 29 21:40:47 crc kubenswrapper[4911]: I0929 21:40:47.094881 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 29 21:40:47 crc kubenswrapper[4911]: I0929 21:40:47.095325 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 29 21:40:47 crc kubenswrapper[4911]: I0929 21:40:47.107625 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:47 crc kubenswrapper[4911]: I0929 21:40:47.107690 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:47 crc kubenswrapper[4911]: I0929 21:40:47.180229 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 29 21:40:47 crc kubenswrapper[4911]: I0929 21:40:47.185471 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:47 crc kubenswrapper[4911]: I0929 21:40:47.480112 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" 
event={"ID":"7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2","Type":"ContainerStarted","Data":"e193a05a00119eaf7334dc6454589940f06d74979a47027993fd987ec8b76b89"} Sep 29 21:40:47 crc kubenswrapper[4911]: I0929 21:40:47.517241 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.313083007 podStartE2EDuration="3.517216747s" podCreationTimestamp="2025-09-29 21:40:44 +0000 UTC" firstStartedPulling="2025-09-29 21:40:45.048471137 +0000 UTC m=+923.025583818" lastFinishedPulling="2025-09-29 21:40:46.252604887 +0000 UTC m=+924.229717558" observedRunningTime="2025-09-29 21:40:47.506101912 +0000 UTC m=+925.483214593" watchObservedRunningTime="2025-09-29 21:40:47.517216747 +0000 UTC m=+925.494329428" Sep 29 21:40:47 crc kubenswrapper[4911]: I0929 21:40:47.559899 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 29 21:40:47 crc kubenswrapper[4911]: I0929 21:40:47.561750 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 29 21:40:48 crc kubenswrapper[4911]: I0929 21:40:48.491621 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.045484 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.204732 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.242320 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-gptbl"] Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.309957 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-m6jw6"] Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.311199 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.336806 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-m6jw6"] Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.494144 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-config\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.494196 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.494218 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-dns-svc\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.494240 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.494333 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pqp6\" (UniqueName: \"kubernetes.io/projected/f88b6474-345c-4c65-9455-10feb2d34fd3-kube-api-access-5pqp6\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.499684 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" podUID="a820cb69-2ee4-4fd1-a51e-9852bd1151fe" containerName="dnsmasq-dns" containerID="cri-o://2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f" gracePeriod=10 Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.595635 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-config\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.595681 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.595699 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-dns-svc\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.595716 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.595749 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pqp6\" (UniqueName: \"kubernetes.io/projected/f88b6474-345c-4c65-9455-10feb2d34fd3-kube-api-access-5pqp6\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.597055 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-config\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.597210 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.597299 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-dns-svc\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.597776 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.615611 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pqp6\" (UniqueName: \"kubernetes.io/projected/f88b6474-345c-4c65-9455-10feb2d34fd3-kube-api-access-5pqp6\") pod \"dnsmasq-dns-698758b865-m6jw6\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.645295 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:49 crc kubenswrapper[4911]: I0929 21:40:49.959686 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.106043 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-ovsdbserver-nb\") pod \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.106212 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-dns-svc\") pod \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.106250 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-ovsdbserver-sb\") pod \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.106301 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dw4dk\" (UniqueName: \"kubernetes.io/projected/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-kube-api-access-dw4dk\") pod \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.106358 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-config\") pod \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\" (UID: \"a820cb69-2ee4-4fd1-a51e-9852bd1151fe\") " Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.113087 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-kube-api-access-dw4dk" (OuterVolumeSpecName: "kube-api-access-dw4dk") pod "a820cb69-2ee4-4fd1-a51e-9852bd1151fe" (UID: "a820cb69-2ee4-4fd1-a51e-9852bd1151fe"). InnerVolumeSpecName "kube-api-access-dw4dk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.150426 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-m6jw6"] Sep 29 21:40:50 crc kubenswrapper[4911]: W0929 21:40:50.151772 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf88b6474_345c_4c65_9455_10feb2d34fd3.slice/crio-f4bbc103efe55cb97996a58b0f1d2f364b9d56fed9e32a89246145a88df5c904 WatchSource:0}: Error finding container f4bbc103efe55cb97996a58b0f1d2f364b9d56fed9e32a89246145a88df5c904: Status 404 returned error can't find the container with id f4bbc103efe55cb97996a58b0f1d2f364b9d56fed9e32a89246145a88df5c904 Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.172522 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a820cb69-2ee4-4fd1-a51e-9852bd1151fe" (UID: "a820cb69-2ee4-4fd1-a51e-9852bd1151fe"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.189706 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a820cb69-2ee4-4fd1-a51e-9852bd1151fe" (UID: "a820cb69-2ee4-4fd1-a51e-9852bd1151fe"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.190480 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-config" (OuterVolumeSpecName: "config") pod "a820cb69-2ee4-4fd1-a51e-9852bd1151fe" (UID: "a820cb69-2ee4-4fd1-a51e-9852bd1151fe"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.194112 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a820cb69-2ee4-4fd1-a51e-9852bd1151fe" (UID: "a820cb69-2ee4-4fd1-a51e-9852bd1151fe"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.208104 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.208230 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.208243 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dw4dk\" (UniqueName: \"kubernetes.io/projected/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-kube-api-access-dw4dk\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.208254 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.208263 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a820cb69-2ee4-4fd1-a51e-9852bd1151fe-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.369956 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Sep 29 21:40:50 crc kubenswrapper[4911]: E0929 21:40:50.370831 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a820cb69-2ee4-4fd1-a51e-9852bd1151fe" containerName="dnsmasq-dns" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.371040 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a820cb69-2ee4-4fd1-a51e-9852bd1151fe" containerName="dnsmasq-dns" Sep 29 21:40:50 crc kubenswrapper[4911]: E0929 21:40:50.371495 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a820cb69-2ee4-4fd1-a51e-9852bd1151fe" containerName="init" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.371623 4911 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a820cb69-2ee4-4fd1-a51e-9852bd1151fe" containerName="init" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.372222 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a820cb69-2ee4-4fd1-a51e-9852bd1151fe" containerName="dnsmasq-dns" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.392974 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.400254 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.400304 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.401716 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.402278 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.402467 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-854d2" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.507363 4911 generic.go:334] "Generic (PLEG): container finished" podID="a820cb69-2ee4-4fd1-a51e-9852bd1151fe" containerID="2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f" exitCode=0 Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.507433 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" event={"ID":"a820cb69-2ee4-4fd1-a51e-9852bd1151fe","Type":"ContainerDied","Data":"2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f"} Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.507465 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" event={"ID":"a820cb69-2ee4-4fd1-a51e-9852bd1151fe","Type":"ContainerDied","Data":"1b5798274458c7828d874a28f82cca2ecb168bfebf023920ec03617f926b263b"} Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.507482 4911 scope.go:117] "RemoveContainer" containerID="2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.507614 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-gptbl" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.512415 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.512481 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/00a836d8-70df-4583-948f-e6869d77e432-lock\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.512533 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7k5v\" (UniqueName: \"kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-kube-api-access-x7k5v\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.512552 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/00a836d8-70df-4583-948f-e6869d77e432-cache\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.512574 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.521427 4911 generic.go:334] "Generic (PLEG): container finished" podID="f88b6474-345c-4c65-9455-10feb2d34fd3" containerID="bb5625680b66758b8e48e47e158401bae96bcbc9d6f4218abe4fe5c9c98c7df0" exitCode=0 Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.521471 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-m6jw6" event={"ID":"f88b6474-345c-4c65-9455-10feb2d34fd3","Type":"ContainerDied","Data":"bb5625680b66758b8e48e47e158401bae96bcbc9d6f4218abe4fe5c9c98c7df0"} Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.521496 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-m6jw6" event={"ID":"f88b6474-345c-4c65-9455-10feb2d34fd3","Type":"ContainerStarted","Data":"f4bbc103efe55cb97996a58b0f1d2f364b9d56fed9e32a89246145a88df5c904"} Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.549639 4911 scope.go:117] "RemoveContainer" containerID="b1f5f0a5bdc209617a41b00b86ca8c73b42b4ca239520bcf0aa75d5056f9f52b" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.577861 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-gptbl"] Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.580084 4911 scope.go:117] "RemoveContainer" containerID="2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f" Sep 29 21:40:50 crc kubenswrapper[4911]: E0929 21:40:50.580483 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f\": container with ID starting with 2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f not found: ID does not exist" containerID="2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.580531 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f"} err="failed to get container status \"2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f\": rpc error: code = NotFound desc = could not find container \"2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f\": container with ID starting with 2bb29c61c72c266ecf456dbd30d23030bf5a36fbe33dfd5c84f926974cfaed8f not found: ID does not exist" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.580561 4911 scope.go:117] "RemoveContainer" containerID="b1f5f0a5bdc209617a41b00b86ca8c73b42b4ca239520bcf0aa75d5056f9f52b" Sep 29 21:40:50 crc kubenswrapper[4911]: E0929 21:40:50.581146 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1f5f0a5bdc209617a41b00b86ca8c73b42b4ca239520bcf0aa75d5056f9f52b\": container with ID starting with b1f5f0a5bdc209617a41b00b86ca8c73b42b4ca239520bcf0aa75d5056f9f52b not found: ID does not exist" containerID="b1f5f0a5bdc209617a41b00b86ca8c73b42b4ca239520bcf0aa75d5056f9f52b" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.581183 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1f5f0a5bdc209617a41b00b86ca8c73b42b4ca239520bcf0aa75d5056f9f52b"} err="failed to get container status \"b1f5f0a5bdc209617a41b00b86ca8c73b42b4ca239520bcf0aa75d5056f9f52b\": rpc error: code = NotFound desc = could not find container \"b1f5f0a5bdc209617a41b00b86ca8c73b42b4ca239520bcf0aa75d5056f9f52b\": container with ID starting with b1f5f0a5bdc209617a41b00b86ca8c73b42b4ca239520bcf0aa75d5056f9f52b not found: ID does not exist" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.587669 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-gptbl"] Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.614261 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7k5v\" (UniqueName: \"kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-kube-api-access-x7k5v\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.614323 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/00a836d8-70df-4583-948f-e6869d77e432-cache\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.614363 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.614481 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.614550 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/00a836d8-70df-4583-948f-e6869d77e432-lock\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.615542 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/00a836d8-70df-4583-948f-e6869d77e432-lock\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: E0929 21:40:50.615678 4911 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 21:40:50 crc kubenswrapper[4911]: E0929 21:40:50.615704 4911 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 21:40:50 crc kubenswrapper[4911]: E0929 21:40:50.615758 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift podName:00a836d8-70df-4583-948f-e6869d77e432 nodeName:}" failed. No retries permitted until 2025-09-29 21:40:51.115732709 +0000 UTC m=+929.092845390 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift") pod "swift-storage-0" (UID: "00a836d8-70df-4583-948f-e6869d77e432") : configmap "swift-ring-files" not found Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.615761 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/00a836d8-70df-4583-948f-e6869d77e432-cache\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.616057 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.644959 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7k5v\" (UniqueName: \"kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-kube-api-access-x7k5v\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.656520 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:50 crc kubenswrapper[4911]: I0929 21:40:50.712871 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a820cb69-2ee4-4fd1-a51e-9852bd1151fe" path="/var/lib/kubelet/pods/a820cb69-2ee4-4fd1-a51e-9852bd1151fe/volumes" Sep 29 21:40:51 crc 
kubenswrapper[4911]: I0929 21:40:51.123733 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:51 crc kubenswrapper[4911]: E0929 21:40:51.123939 4911 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 21:40:51 crc kubenswrapper[4911]: E0929 21:40:51.123967 4911 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 21:40:51 crc kubenswrapper[4911]: E0929 21:40:51.124028 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift podName:00a836d8-70df-4583-948f-e6869d77e432 nodeName:}" failed. No retries permitted until 2025-09-29 21:40:52.124011 +0000 UTC m=+930.101123671 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift") pod "swift-storage-0" (UID: "00a836d8-70df-4583-948f-e6869d77e432") : configmap "swift-ring-files" not found Sep 29 21:40:51 crc kubenswrapper[4911]: I0929 21:40:51.532663 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-m6jw6" event={"ID":"f88b6474-345c-4c65-9455-10feb2d34fd3","Type":"ContainerStarted","Data":"c2e3e297621597cd32f423865481dee395d79358feb30b394cd97f6e37a0357f"} Sep 29 21:40:51 crc kubenswrapper[4911]: I0929 21:40:51.532823 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:40:51 crc kubenswrapper[4911]: I0929 21:40:51.561715 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-m6jw6" podStartSLOduration=2.561691686 podStartE2EDuration="2.561691686s" podCreationTimestamp="2025-09-29 21:40:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:40:51.551181429 +0000 UTC m=+929.528294130" watchObservedRunningTime="2025-09-29 21:40:51.561691686 +0000 UTC m=+929.538804377" Sep 29 21:40:52 crc kubenswrapper[4911]: I0929 21:40:52.140186 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:52 crc kubenswrapper[4911]: E0929 21:40:52.140455 4911 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 21:40:52 crc kubenswrapper[4911]: E0929 21:40:52.140930 4911 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 21:40:52 crc kubenswrapper[4911]: E0929 21:40:52.141079 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift podName:00a836d8-70df-4583-948f-e6869d77e432 nodeName:}" failed. No retries permitted until 2025-09-29 21:40:54.14105773 +0000 UTC m=+932.118170411 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift") pod "swift-storage-0" (UID: "00a836d8-70df-4583-948f-e6869d77e432") : configmap "swift-ring-files" not found Sep 29 21:40:52 crc kubenswrapper[4911]: I0929 21:40:52.848186 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-cfk2q"] Sep 29 21:40:52 crc kubenswrapper[4911]: I0929 21:40:52.849214 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-cfk2q" Sep 29 21:40:52 crc kubenswrapper[4911]: I0929 21:40:52.860786 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-cfk2q"] Sep 29 21:40:52 crc kubenswrapper[4911]: I0929 21:40:52.957080 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5nlw\" (UniqueName: \"kubernetes.io/projected/372a6b13-1d3f-4a0b-9210-4f5b669d486a-kube-api-access-c5nlw\") pod \"glance-db-create-cfk2q\" (UID: \"372a6b13-1d3f-4a0b-9210-4f5b669d486a\") " pod="openstack/glance-db-create-cfk2q" Sep 29 21:40:53 crc kubenswrapper[4911]: I0929 21:40:53.059123 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5nlw\" (UniqueName: \"kubernetes.io/projected/372a6b13-1d3f-4a0b-9210-4f5b669d486a-kube-api-access-c5nlw\") pod \"glance-db-create-cfk2q\" (UID: \"372a6b13-1d3f-4a0b-9210-4f5b669d486a\") " pod="openstack/glance-db-create-cfk2q" Sep 29 21:40:53 crc kubenswrapper[4911]: I0929 21:40:53.083604 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5nlw\" (UniqueName: \"kubernetes.io/projected/372a6b13-1d3f-4a0b-9210-4f5b669d486a-kube-api-access-c5nlw\") pod \"glance-db-create-cfk2q\" (UID: \"372a6b13-1d3f-4a0b-9210-4f5b669d486a\") " pod="openstack/glance-db-create-cfk2q" Sep 29 21:40:53 crc kubenswrapper[4911]: I0929 21:40:53.217707 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-cfk2q" Sep 29 21:40:53 crc kubenswrapper[4911]: I0929 21:40:53.669536 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-cfk2q"] Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.180707 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0" Sep 29 21:40:54 crc kubenswrapper[4911]: E0929 21:40:54.180960 4911 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 21:40:54 crc kubenswrapper[4911]: E0929 21:40:54.180993 4911 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 21:40:54 crc kubenswrapper[4911]: E0929 21:40:54.181060 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift podName:00a836d8-70df-4583-948f-e6869d77e432 nodeName:}" failed. No retries permitted until 2025-09-29 21:40:58.181040781 +0000 UTC m=+936.158153452 (durationBeforeRetry 4s). 
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.220128 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-878rf"]
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.224229 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.227814 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.227852 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.227834 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.236034 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-878rf"]
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.282727 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66dda823-f2ce-4a7a-9632-ea5a4022de8d-etc-swift\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.282779 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66dda823-f2ce-4a7a-9632-ea5a4022de8d-scripts\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.282850 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66dda823-f2ce-4a7a-9632-ea5a4022de8d-ring-data-devices\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.282884 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-dispersionconf\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.282967 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmldv\" (UniqueName: \"kubernetes.io/projected/66dda823-f2ce-4a7a-9632-ea5a4022de8d-kube-api-access-bmldv\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.283058 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-combined-ca-bundle\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.283095 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-swiftconf\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.386110 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-combined-ca-bundle\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.386194 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-swiftconf\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.386302 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66dda823-f2ce-4a7a-9632-ea5a4022de8d-etc-swift\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.386344 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66dda823-f2ce-4a7a-9632-ea5a4022de8d-scripts\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.386386 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66dda823-f2ce-4a7a-9632-ea5a4022de8d-ring-data-devices\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.386419 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-dispersionconf\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.386488 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmldv\" (UniqueName: \"kubernetes.io/projected/66dda823-f2ce-4a7a-9632-ea5a4022de8d-kube-api-access-bmldv\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.386937 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66dda823-f2ce-4a7a-9632-ea5a4022de8d-etc-swift\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf"
pod="openstack/swift-ring-rebalance-878rf" Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.387447 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66dda823-f2ce-4a7a-9632-ea5a4022de8d-scripts\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf" Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.387942 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66dda823-f2ce-4a7a-9632-ea5a4022de8d-ring-data-devices\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf" Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.392456 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-combined-ca-bundle\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf" Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.393104 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-dispersionconf\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf" Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.395211 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-swiftconf\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf" Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.407631 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmldv\" (UniqueName: \"kubernetes.io/projected/66dda823-f2ce-4a7a-9632-ea5a4022de8d-kube-api-access-bmldv\") pod \"swift-ring-rebalance-878rf\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " pod="openstack/swift-ring-rebalance-878rf" Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.544109 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-878rf" Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.573237 4911 generic.go:334] "Generic (PLEG): container finished" podID="372a6b13-1d3f-4a0b-9210-4f5b669d486a" containerID="5e9a9bd7f96624db09f9d237d6cc521a52fba8449274fa40eac2e92c7a4ff51b" exitCode=0 Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.573283 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-cfk2q" event={"ID":"372a6b13-1d3f-4a0b-9210-4f5b669d486a","Type":"ContainerDied","Data":"5e9a9bd7f96624db09f9d237d6cc521a52fba8449274fa40eac2e92c7a4ff51b"} Sep 29 21:40:54 crc kubenswrapper[4911]: I0929 21:40:54.573312 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-cfk2q" event={"ID":"372a6b13-1d3f-4a0b-9210-4f5b669d486a","Type":"ContainerStarted","Data":"3deaebbc7a4be37dc633dad330673c499022fe62d0f9285b5d8d6fe364793162"} Sep 29 21:40:55 crc kubenswrapper[4911]: I0929 21:40:55.029667 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-878rf"] Sep 29 21:40:55 crc kubenswrapper[4911]: I0929 21:40:55.214259 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:40:55 crc kubenswrapper[4911]: I0929 21:40:55.214322 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:40:55 crc kubenswrapper[4911]: I0929 21:40:55.214378 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:40:55 crc kubenswrapper[4911]: I0929 21:40:55.215225 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5dec289dec4d2c01af51bffa09906f4044b26096f9186eb8b0d1b24f0055ec27"} pod="openshift-machine-config-operator/machine-config-daemon-w647f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 21:40:55 crc kubenswrapper[4911]: I0929 21:40:55.215296 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" containerID="cri-o://5dec289dec4d2c01af51bffa09906f4044b26096f9186eb8b0d1b24f0055ec27" gracePeriod=600 Sep 29 21:40:55 crc kubenswrapper[4911]: I0929 21:40:55.583186 4911 generic.go:334] "Generic (PLEG): container finished" podID="50640abc-40db-4390-82d1-f3cfc76da71c" containerID="5dec289dec4d2c01af51bffa09906f4044b26096f9186eb8b0d1b24f0055ec27" exitCode=0 Sep 29 21:40:55 crc kubenswrapper[4911]: I0929 21:40:55.583262 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerDied","Data":"5dec289dec4d2c01af51bffa09906f4044b26096f9186eb8b0d1b24f0055ec27"} Sep 29 21:40:55 crc kubenswrapper[4911]: I0929 21:40:55.583541 4911 
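
The liveness failure above is a plain HTTP GET against 127.0.0.1:8798/health where a transport error ("connection refused") counts as unhealthy, which is why kubelet kills and restarts the container. A minimal Go sketch of that probe shape (the 1s timeout is an assumption of this sketch, not the pod's configured value):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// httpProbe approximates an HTTP liveness check: GET the endpoint;
// any transport error or a non-2xx/3xx status is a probe failure.
func httpProbe(url string) error {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return fmt.Errorf("probe failed: %w", err) // e.g. connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("probe failed: unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	// With nothing listening on the port, this reproduces the
	// "connection refused" failure seen in the log.
	if err := httpProbe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println(err)
	}
}
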
Sep 29 21:40:55 crc kubenswrapper[4911]: I0929 21:40:55.583565 4911 scope.go:117] "RemoveContainer" containerID="3f4ab040675bb23b2a12316fff86293a5b72278bad6949dfbe357c01f7df89f3"
Sep 29 21:40:55 crc kubenswrapper[4911]: I0929 21:40:55.586070 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-878rf" event={"ID":"66dda823-f2ce-4a7a-9632-ea5a4022de8d","Type":"ContainerStarted","Data":"83b68dd839a5d87aca43df958cc7cd939dfba565dc2e0892d9891c0acece0d5b"}
Sep 29 21:40:55 crc kubenswrapper[4911]: I0929 21:40:55.963785 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-cfk2q"
Sep 29 21:40:56 crc kubenswrapper[4911]: I0929 21:40:56.016074 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5nlw\" (UniqueName: \"kubernetes.io/projected/372a6b13-1d3f-4a0b-9210-4f5b669d486a-kube-api-access-c5nlw\") pod \"372a6b13-1d3f-4a0b-9210-4f5b669d486a\" (UID: \"372a6b13-1d3f-4a0b-9210-4f5b669d486a\") "
Sep 29 21:40:56 crc kubenswrapper[4911]: I0929 21:40:56.032434 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/372a6b13-1d3f-4a0b-9210-4f5b669d486a-kube-api-access-c5nlw" (OuterVolumeSpecName: "kube-api-access-c5nlw") pod "372a6b13-1d3f-4a0b-9210-4f5b669d486a" (UID: "372a6b13-1d3f-4a0b-9210-4f5b669d486a"). InnerVolumeSpecName "kube-api-access-c5nlw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:40:56 crc kubenswrapper[4911]: I0929 21:40:56.118167 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5nlw\" (UniqueName: \"kubernetes.io/projected/372a6b13-1d3f-4a0b-9210-4f5b669d486a-kube-api-access-c5nlw\") on node \"crc\" DevicePath \"\""
Sep 29 21:40:56 crc kubenswrapper[4911]: I0929 21:40:56.607879 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-cfk2q" event={"ID":"372a6b13-1d3f-4a0b-9210-4f5b669d486a","Type":"ContainerDied","Data":"3deaebbc7a4be37dc633dad330673c499022fe62d0f9285b5d8d6fe364793162"}
Sep 29 21:40:56 crc kubenswrapper[4911]: I0929 21:40:56.608123 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3deaebbc7a4be37dc633dad330673c499022fe62d0f9285b5d8d6fe364793162"
Sep 29 21:40:56 crc kubenswrapper[4911]: I0929 21:40:56.607906 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-cfk2q"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.114642 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-nq2n4"]
Sep 29 21:40:57 crc kubenswrapper[4911]: E0929 21:40:57.115586 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="372a6b13-1d3f-4a0b-9210-4f5b669d486a" containerName="mariadb-database-create"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.125543 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="372a6b13-1d3f-4a0b-9210-4f5b669d486a" containerName="mariadb-database-create"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.126038 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="372a6b13-1d3f-4a0b-9210-4f5b669d486a" containerName="mariadb-database-create"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.126754 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-nq2n4"]
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.126927 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nq2n4"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.238427 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr7wh\" (UniqueName: \"kubernetes.io/projected/d4773910-e3b9-4552-bb42-45f748b11e65-kube-api-access-kr7wh\") pod \"keystone-db-create-nq2n4\" (UID: \"d4773910-e3b9-4552-bb42-45f748b11e65\") " pod="openstack/keystone-db-create-nq2n4"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.339538 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr7wh\" (UniqueName: \"kubernetes.io/projected/d4773910-e3b9-4552-bb42-45f748b11e65-kube-api-access-kr7wh\") pod \"keystone-db-create-nq2n4\" (UID: \"d4773910-e3b9-4552-bb42-45f748b11e65\") " pod="openstack/keystone-db-create-nq2n4"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.370327 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr7wh\" (UniqueName: \"kubernetes.io/projected/d4773910-e3b9-4552-bb42-45f748b11e65-kube-api-access-kr7wh\") pod \"keystone-db-create-nq2n4\" (UID: \"d4773910-e3b9-4552-bb42-45f748b11e65\") " pod="openstack/keystone-db-create-nq2n4"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.456681 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nq2n4"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.482326 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-876nd"]
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.483286 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-876nd"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.494235 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-876nd"]
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.543659 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xfsh\" (UniqueName: \"kubernetes.io/projected/7ac967ca-130d-4139-bb7e-57bbd1542f24-kube-api-access-5xfsh\") pod \"placement-db-create-876nd\" (UID: \"7ac967ca-130d-4139-bb7e-57bbd1542f24\") " pod="openstack/placement-db-create-876nd"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.645888 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xfsh\" (UniqueName: \"kubernetes.io/projected/7ac967ca-130d-4139-bb7e-57bbd1542f24-kube-api-access-5xfsh\") pod \"placement-db-create-876nd\" (UID: \"7ac967ca-130d-4139-bb7e-57bbd1542f24\") " pod="openstack/placement-db-create-876nd"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.682387 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xfsh\" (UniqueName: \"kubernetes.io/projected/7ac967ca-130d-4139-bb7e-57bbd1542f24-kube-api-access-5xfsh\") pod \"placement-db-create-876nd\" (UID: \"7ac967ca-130d-4139-bb7e-57bbd1542f24\") " pod="openstack/placement-db-create-876nd"
Sep 29 21:40:57 crc kubenswrapper[4911]: I0929 21:40:57.809461 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-876nd"
Sep 29 21:40:58 crc kubenswrapper[4911]: I0929 21:40:58.257490 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0"
Sep 29 21:40:58 crc kubenswrapper[4911]: E0929 21:40:58.257727 4911 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 29 21:40:58 crc kubenswrapper[4911]: E0929 21:40:58.257775 4911 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 29 21:40:58 crc kubenswrapper[4911]: E0929 21:40:58.257900 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift podName:00a836d8-70df-4583-948f-e6869d77e432 nodeName:}" failed. No retries permitted until 2025-09-29 21:41:06.257868466 +0000 UTC m=+944.234981167 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift") pod "swift-storage-0" (UID: "00a836d8-70df-4583-948f-e6869d77e432") : configmap "swift-ring-files" not found
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.174890 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-nq2n4"]
Sep 29 21:40:59 crc kubenswrapper[4911]: W0929 21:40:59.194363 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4773910_e3b9_4552_bb42_45f748b11e65.slice/crio-ec4be943b2690e9a986fb644070e03df89a5f98f74d1930abba06bdf997164b4 WatchSource:0}: Error finding container ec4be943b2690e9a986fb644070e03df89a5f98f74d1930abba06bdf997164b4: Status 404 returned error can't find the container with id ec4be943b2690e9a986fb644070e03df89a5f98f74d1930abba06bdf997164b4
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.236504 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-876nd"]
Sep 29 21:40:59 crc kubenswrapper[4911]: W0929 21:40:59.247495 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ac967ca_130d_4139_bb7e_57bbd1542f24.slice/crio-a1d66e3232589d316bae9041590ae49cfa1ee2788e659bacc0fe5eeea282c885 WatchSource:0}: Error finding container a1d66e3232589d316bae9041590ae49cfa1ee2788e659bacc0fe5eeea282c885: Status 404 returned error can't find the container with id a1d66e3232589d316bae9041590ae49cfa1ee2788e659bacc0fe5eeea282c885
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.647022 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-m6jw6"
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.648509 4911 generic.go:334] "Generic (PLEG): container finished" podID="7ac967ca-130d-4139-bb7e-57bbd1542f24" containerID="0e18a6afa3fe010b0c3d20173b764f4d2e215d032365b1fc5fc9d1b342a63e15" exitCode=0
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.648607 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-876nd" event={"ID":"7ac967ca-130d-4139-bb7e-57bbd1542f24","Type":"ContainerDied","Data":"0e18a6afa3fe010b0c3d20173b764f4d2e215d032365b1fc5fc9d1b342a63e15"}
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.648646 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-876nd" event={"ID":"7ac967ca-130d-4139-bb7e-57bbd1542f24","Type":"ContainerStarted","Data":"a1d66e3232589d316bae9041590ae49cfa1ee2788e659bacc0fe5eeea282c885"}
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.651900 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-878rf" event={"ID":"66dda823-f2ce-4a7a-9632-ea5a4022de8d","Type":"ContainerStarted","Data":"ba27de4fbf17f52dfa2f65381848f9e9f2ed3a579d59cfe2bf7d9931fa231bdc"}
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.659552 4911 generic.go:334] "Generic (PLEG): container finished" podID="d4773910-e3b9-4552-bb42-45f748b11e65" containerID="0f73f203cc8f44a43c149655119a453cfcbd51d7e091efa9966818376cc936d0" exitCode=0
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.659613 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nq2n4" event={"ID":"d4773910-e3b9-4552-bb42-45f748b11e65","Type":"ContainerDied","Data":"0f73f203cc8f44a43c149655119a453cfcbd51d7e091efa9966818376cc936d0"}
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.659644 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nq2n4" event={"ID":"d4773910-e3b9-4552-bb42-45f748b11e65","Type":"ContainerStarted","Data":"ec4be943b2690e9a986fb644070e03df89a5f98f74d1930abba06bdf997164b4"}
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.708713 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-878rf" podStartSLOduration=2.010267326 podStartE2EDuration="5.708692449s" podCreationTimestamp="2025-09-29 21:40:54 +0000 UTC" firstStartedPulling="2025-09-29 21:40:55.038248807 +0000 UTC m=+933.015361518" lastFinishedPulling="2025-09-29 21:40:58.73667394 +0000 UTC m=+936.713786641" observedRunningTime="2025-09-29 21:40:59.705412457 +0000 UTC m=+937.682525198" watchObservedRunningTime="2025-09-29 21:40:59.708692449 +0000 UTC m=+937.685805160"
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.739382 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0"
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.754506 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-dwtnp"]
Sep 29 21:40:59 crc kubenswrapper[4911]: I0929 21:40:59.755075 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp" podUID="f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" containerName="dnsmasq-dns" containerID="cri-o://a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de" gracePeriod=10
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.354466 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp"
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.404214 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-dns-svc\") pod \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") "
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.404283 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rmnl\" (UniqueName: \"kubernetes.io/projected/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-kube-api-access-2rmnl\") pod \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") "
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.404392 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-config\") pod \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") "
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.410341 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-kube-api-access-2rmnl" (OuterVolumeSpecName: "kube-api-access-2rmnl") pod "f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" (UID: "f4dcfa4a-a27b-4b42-938d-d7509e1d2f20"). InnerVolumeSpecName "kube-api-access-2rmnl". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:00 crc kubenswrapper[4911]: E0929 21:41:00.480152 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-dns-svc podName:f4dcfa4a-a27b-4b42-938d-d7509e1d2f20 nodeName:}" failed. No retries permitted until 2025-09-29 21:41:00.980124907 +0000 UTC m=+938.957237578 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "dns-svc" (UniqueName: "kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-dns-svc") pod "f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" (UID: "f4dcfa4a-a27b-4b42-938d-d7509e1d2f20") : error deleting /var/lib/kubelet/pods/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20/volume-subpaths: remove /var/lib/kubelet/pods/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20/volume-subpaths: no such file or directory Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.480497 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-config" (OuterVolumeSpecName: "config") pod "f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" (UID: "f4dcfa4a-a27b-4b42-938d-d7509e1d2f20"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.506154 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rmnl\" (UniqueName: \"kubernetes.io/projected/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-kube-api-access-2rmnl\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.506180 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.675533 4911 generic.go:334] "Generic (PLEG): container finished" podID="f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" containerID="a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de" exitCode=0 Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.675741 4911 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.676391 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp" event={"ID":"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20","Type":"ContainerDied","Data":"a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de"}
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.677209 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-dwtnp" event={"ID":"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20","Type":"ContainerDied","Data":"34749579387cfbac2129c8fd8166378449ebeb11cecec8356514a4f0aded861e"}
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.677235 4911 scope.go:117] "RemoveContainer" containerID="a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de"
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.726463 4911 scope.go:117] "RemoveContainer" containerID="be129b1955864f39a5eaf751b8ea4be12d97a9f73044a47c12f0a15125023174"
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.748321 4911 scope.go:117] "RemoveContainer" containerID="a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de"
Sep 29 21:41:00 crc kubenswrapper[4911]: E0929 21:41:00.748911 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de\": container with ID starting with a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de not found: ID does not exist" containerID="a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de"
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.748955 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de"} err="failed to get container status \"a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de\": rpc error: code = NotFound desc = could not find container \"a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de\": container with ID starting with a7adafe88e5657c3a8efd1d863a44f1876e9c32579273fb07c2264924ad349de not found: ID does not exist"
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.748985 4911 scope.go:117] "RemoveContainer" containerID="be129b1955864f39a5eaf751b8ea4be12d97a9f73044a47c12f0a15125023174"
Sep 29 21:41:00 crc kubenswrapper[4911]: E0929 21:41:00.749415 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be129b1955864f39a5eaf751b8ea4be12d97a9f73044a47c12f0a15125023174\": container with ID starting with be129b1955864f39a5eaf751b8ea4be12d97a9f73044a47c12f0a15125023174 not found: ID does not exist" containerID="be129b1955864f39a5eaf751b8ea4be12d97a9f73044a47c12f0a15125023174"
Sep 29 21:41:00 crc kubenswrapper[4911]: I0929 21:41:00.749457 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be129b1955864f39a5eaf751b8ea4be12d97a9f73044a47c12f0a15125023174"} err="failed to get container status \"be129b1955864f39a5eaf751b8ea4be12d97a9f73044a47c12f0a15125023174\": rpc error: code = NotFound desc = could not find container \"be129b1955864f39a5eaf751b8ea4be12d97a9f73044a47c12f0a15125023174\": container with ID starting with be129b1955864f39a5eaf751b8ea4be12d97a9f73044a47c12f0a15125023174 not found: ID does not exist"
Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.013077 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-dns-svc\") pod \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\" (UID: \"f4dcfa4a-a27b-4b42-938d-d7509e1d2f20\") "
Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.013740 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" (UID: "f4dcfa4a-a27b-4b42-938d-d7509e1d2f20"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.014099 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.120571 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-876nd"
Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.125859 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nq2n4"
Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.216240 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xfsh\" (UniqueName: \"kubernetes.io/projected/7ac967ca-130d-4139-bb7e-57bbd1542f24-kube-api-access-5xfsh\") pod \"7ac967ca-130d-4139-bb7e-57bbd1542f24\" (UID: \"7ac967ca-130d-4139-bb7e-57bbd1542f24\") "
Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.216475 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr7wh\" (UniqueName: \"kubernetes.io/projected/d4773910-e3b9-4552-bb42-45f748b11e65-kube-api-access-kr7wh\") pod \"d4773910-e3b9-4552-bb42-45f748b11e65\" (UID: \"d4773910-e3b9-4552-bb42-45f748b11e65\") "
Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.220098 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ac967ca-130d-4139-bb7e-57bbd1542f24-kube-api-access-5xfsh" (OuterVolumeSpecName: "kube-api-access-5xfsh") pod "7ac967ca-130d-4139-bb7e-57bbd1542f24" (UID: "7ac967ca-130d-4139-bb7e-57bbd1542f24"). InnerVolumeSpecName "kube-api-access-5xfsh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.221069 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4773910-e3b9-4552-bb42-45f748b11e65-kube-api-access-kr7wh" (OuterVolumeSpecName: "kube-api-access-kr7wh") pod "d4773910-e3b9-4552-bb42-45f748b11e65" (UID: "d4773910-e3b9-4552-bb42-45f748b11e65"). InnerVolumeSpecName "kube-api-access-kr7wh". PluginName "kubernetes.io/projected", VolumeGidValue ""
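
Two "already gone" races resolve themselves above: the subPath cleanup hit ENOENT on the volume-subpaths directory (retried after 500ms), and RemoveContainer got NotFound because cri-o had already deleted the dnsmasq containers. Cleanup code conventionally treats not-found as success so retries converge; a minimal Go sketch of that pattern (the demo path is hypothetical, standing in for the directory from the log):

package main

import (
	"errors"
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
)

// removeIfExists deletes a path but treats "already gone" as success,
// the idempotent-cleanup pattern behind the benign errors above.
func removeIfExists(path string) error {
	if err := os.Remove(path); err != nil && !errors.Is(err, fs.ErrNotExist) {
		return err
	}
	return nil
}

func main() {
	p := filepath.Join(os.TempDir(), "volume-subpaths-demo")
	if err := os.Mkdir(p, 0o755); err != nil && !errors.Is(err, fs.ErrExist) {
		panic(err)
	}
	fmt.Println(removeIfExists(p)) // <nil>: removed
	fmt.Println(removeIfExists(p)) // <nil>: already gone counts as success
}
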
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.306976 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-dwtnp"] Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.311830 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-dwtnp"] Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.318509 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xfsh\" (UniqueName: \"kubernetes.io/projected/7ac967ca-130d-4139-bb7e-57bbd1542f24-kube-api-access-5xfsh\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.318535 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr7wh\" (UniqueName: \"kubernetes.io/projected/d4773910-e3b9-4552-bb42-45f748b11e65-kube-api-access-kr7wh\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.688462 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-876nd" event={"ID":"7ac967ca-130d-4139-bb7e-57bbd1542f24","Type":"ContainerDied","Data":"a1d66e3232589d316bae9041590ae49cfa1ee2788e659bacc0fe5eeea282c885"} Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.688519 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1d66e3232589d316bae9041590ae49cfa1ee2788e659bacc0fe5eeea282c885" Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.688488 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-876nd" Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.691890 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nq2n4" event={"ID":"d4773910-e3b9-4552-bb42-45f748b11e65","Type":"ContainerDied","Data":"ec4be943b2690e9a986fb644070e03df89a5f98f74d1930abba06bdf997164b4"} Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.691929 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec4be943b2690e9a986fb644070e03df89a5f98f74d1930abba06bdf997164b4" Sep 29 21:41:01 crc kubenswrapper[4911]: I0929 21:41:01.691982 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-nq2n4" Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.717918 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" path="/var/lib/kubelet/pods/f4dcfa4a-a27b-4b42-938d-d7509e1d2f20/volumes" Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.885058 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-9075-account-create-b7jml"] Sep 29 21:41:02 crc kubenswrapper[4911]: E0929 21:41:02.885342 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4773910-e3b9-4552-bb42-45f748b11e65" containerName="mariadb-database-create" Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.885353 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4773910-e3b9-4552-bb42-45f748b11e65" containerName="mariadb-database-create" Sep 29 21:41:02 crc kubenswrapper[4911]: E0929 21:41:02.885368 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" containerName="init" Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.885374 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" containerName="init" Sep 29 21:41:02 crc kubenswrapper[4911]: E0929 21:41:02.885387 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" containerName="dnsmasq-dns" Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.885393 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" containerName="dnsmasq-dns" Sep 29 21:41:02 crc kubenswrapper[4911]: E0929 21:41:02.885411 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ac967ca-130d-4139-bb7e-57bbd1542f24" containerName="mariadb-database-create" Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.885417 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ac967ca-130d-4139-bb7e-57bbd1542f24" containerName="mariadb-database-create" Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.885549 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4773910-e3b9-4552-bb42-45f748b11e65" containerName="mariadb-database-create" Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.885588 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4dcfa4a-a27b-4b42-938d-d7509e1d2f20" containerName="dnsmasq-dns" Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.885608 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ac967ca-130d-4139-bb7e-57bbd1542f24" containerName="mariadb-database-create" Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.886103 4911 util.go:30] "No sandbox for pod can be found. 
Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.889276 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.906202 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9075-account-create-b7jml"]
Sep 29 21:41:02 crc kubenswrapper[4911]: I0929 21:41:02.947368 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvk78\" (UniqueName: \"kubernetes.io/projected/9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9-kube-api-access-gvk78\") pod \"glance-9075-account-create-b7jml\" (UID: \"9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9\") " pod="openstack/glance-9075-account-create-b7jml"
Sep 29 21:41:03 crc kubenswrapper[4911]: I0929 21:41:03.048743 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvk78\" (UniqueName: \"kubernetes.io/projected/9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9-kube-api-access-gvk78\") pod \"glance-9075-account-create-b7jml\" (UID: \"9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9\") " pod="openstack/glance-9075-account-create-b7jml"
Sep 29 21:41:03 crc kubenswrapper[4911]: I0929 21:41:03.065988 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvk78\" (UniqueName: \"kubernetes.io/projected/9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9-kube-api-access-gvk78\") pod \"glance-9075-account-create-b7jml\" (UID: \"9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9\") " pod="openstack/glance-9075-account-create-b7jml"
Sep 29 21:41:03 crc kubenswrapper[4911]: I0929 21:41:03.209570 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9075-account-create-b7jml"
Sep 29 21:41:03 crc kubenswrapper[4911]: I0929 21:41:03.701887 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9075-account-create-b7jml"]
Sep 29 21:41:03 crc kubenswrapper[4911]: W0929 21:41:03.708253 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d246d68_02be_4ee1_b0c8_f7d3a41dc1b9.slice/crio-3ae0b1a43270ac74b6e3c0e0229013f36b3085384f6056bbcb01aaa581c1f849 WatchSource:0}: Error finding container 3ae0b1a43270ac74b6e3c0e0229013f36b3085384f6056bbcb01aaa581c1f849: Status 404 returned error can't find the container with id 3ae0b1a43270ac74b6e3c0e0229013f36b3085384f6056bbcb01aaa581c1f849
Sep 29 21:41:04 crc kubenswrapper[4911]: I0929 21:41:04.722035 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9075-account-create-b7jml" event={"ID":"9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9","Type":"ContainerStarted","Data":"3ae0b1a43270ac74b6e3c0e0229013f36b3085384f6056bbcb01aaa581c1f849"}
Sep 29 21:41:05 crc kubenswrapper[4911]: I0929 21:41:05.733660 4911 generic.go:334] "Generic (PLEG): container finished" podID="9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9" containerID="25ffa1d06c887d7b7a36eb2c184d1b9b41051db863020671fd23f778ad04e12e" exitCode=0
Sep 29 21:41:05 crc kubenswrapper[4911]: I0929 21:41:05.733714 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9075-account-create-b7jml" event={"ID":"9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9","Type":"ContainerDied","Data":"25ffa1d06c887d7b7a36eb2c184d1b9b41051db863020671fd23f778ad04e12e"}
Sep 29 21:41:06 crc kubenswrapper[4911]: I0929 21:41:06.314065 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0"
Sep 29 21:41:06 crc kubenswrapper[4911]: I0929 21:41:06.329690 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/00a836d8-70df-4583-948f-e6869d77e432-etc-swift\") pod \"swift-storage-0\" (UID: \"00a836d8-70df-4583-948f-e6869d77e432\") " pod="openstack/swift-storage-0"
Sep 29 21:41:06 crc kubenswrapper[4911]: I0929 21:41:06.381385 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Sep 29 21:41:06 crc kubenswrapper[4911]: I0929 21:41:06.746195 4911 generic.go:334] "Generic (PLEG): container finished" podID="66dda823-f2ce-4a7a-9632-ea5a4022de8d" containerID="ba27de4fbf17f52dfa2f65381848f9e9f2ed3a579d59cfe2bf7d9931fa231bdc" exitCode=0
Sep 29 21:41:06 crc kubenswrapper[4911]: I0929 21:41:06.746298 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-878rf" event={"ID":"66dda823-f2ce-4a7a-9632-ea5a4022de8d","Type":"ContainerDied","Data":"ba27de4fbf17f52dfa2f65381848f9e9f2ed3a579d59cfe2bf7d9931fa231bdc"}
Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.029062 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Sep 29 21:41:07 crc kubenswrapper[4911]: W0929 21:41:07.036197 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00a836d8_70df_4583_948f_e6869d77e432.slice/crio-82543e9cb65766df49d88a1bfc88e1cab4eadbad476641156e8ff23e4ef05969 WatchSource:0}: Error finding container 82543e9cb65766df49d88a1bfc88e1cab4eadbad476641156e8ff23e4ef05969: Status 404 returned error can't find the container with id 82543e9cb65766df49d88a1bfc88e1cab4eadbad476641156e8ff23e4ef05969
Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.119157 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9075-account-create-b7jml"
Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.135641 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvk78\" (UniqueName: \"kubernetes.io/projected/9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9-kube-api-access-gvk78\") pod \"9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9\" (UID: \"9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9\") "
Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.142196 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9-kube-api-access-gvk78" (OuterVolumeSpecName: "kube-api-access-gvk78") pod "9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9" (UID: "9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9"). InnerVolumeSpecName "kube-api-access-gvk78". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.232620 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-33c5-account-create-shdxk"] Sep 29 21:41:07 crc kubenswrapper[4911]: E0929 21:41:07.232959 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9" containerName="mariadb-account-create" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.232969 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9" containerName="mariadb-account-create" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.233150 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9" containerName="mariadb-account-create" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.233595 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-33c5-account-create-shdxk" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.242505 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvk78\" (UniqueName: \"kubernetes.io/projected/9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9-kube-api-access-gvk78\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.242540 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-33c5-account-create-shdxk"] Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.265491 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.343318 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2rbl\" (UniqueName: \"kubernetes.io/projected/9759766a-e408-4d42-b84d-8acc6a3de4ee-kube-api-access-n2rbl\") pod \"keystone-33c5-account-create-shdxk\" (UID: \"9759766a-e408-4d42-b84d-8acc6a3de4ee\") " pod="openstack/keystone-33c5-account-create-shdxk" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.445705 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2rbl\" (UniqueName: \"kubernetes.io/projected/9759766a-e408-4d42-b84d-8acc6a3de4ee-kube-api-access-n2rbl\") pod \"keystone-33c5-account-create-shdxk\" (UID: \"9759766a-e408-4d42-b84d-8acc6a3de4ee\") " pod="openstack/keystone-33c5-account-create-shdxk" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.467279 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2rbl\" (UniqueName: \"kubernetes.io/projected/9759766a-e408-4d42-b84d-8acc6a3de4ee-kube-api-access-n2rbl\") pod \"keystone-33c5-account-create-shdxk\" (UID: \"9759766a-e408-4d42-b84d-8acc6a3de4ee\") " pod="openstack/keystone-33c5-account-create-shdxk" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.580940 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-33c5-account-create-shdxk" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.616124 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-4bbc-account-create-xdw6c"] Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.618199 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-4bbc-account-create-xdw6c" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.620296 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.625954 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-4bbc-account-create-xdw6c"] Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.649545 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j6sj\" (UniqueName: \"kubernetes.io/projected/e4bbd41e-cd3c-4d2e-aa8d-f1e474171118-kube-api-access-5j6sj\") pod \"placement-4bbc-account-create-xdw6c\" (UID: \"e4bbd41e-cd3c-4d2e-aa8d-f1e474171118\") " pod="openstack/placement-4bbc-account-create-xdw6c" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.750664 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j6sj\" (UniqueName: \"kubernetes.io/projected/e4bbd41e-cd3c-4d2e-aa8d-f1e474171118-kube-api-access-5j6sj\") pod \"placement-4bbc-account-create-xdw6c\" (UID: \"e4bbd41e-cd3c-4d2e-aa8d-f1e474171118\") " pod="openstack/placement-4bbc-account-create-xdw6c" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.763682 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9075-account-create-b7jml" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.763719 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9075-account-create-b7jml" event={"ID":"9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9","Type":"ContainerDied","Data":"3ae0b1a43270ac74b6e3c0e0229013f36b3085384f6056bbcb01aaa581c1f849"} Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.763963 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ae0b1a43270ac74b6e3c0e0229013f36b3085384f6056bbcb01aaa581c1f849" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.765976 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"82543e9cb65766df49d88a1bfc88e1cab4eadbad476641156e8ff23e4ef05969"} Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.773841 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"54bedb11-6943-4e34-a221-8dbd2cfd5eee","Type":"ContainerDied","Data":"6332677c2f853df112183ac5da0e7e95e8fdbc8790b02d3fe1434e560e32033d"} Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.776527 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5j6sj\" (UniqueName: \"kubernetes.io/projected/e4bbd41e-cd3c-4d2e-aa8d-f1e474171118-kube-api-access-5j6sj\") pod \"placement-4bbc-account-create-xdw6c\" (UID: \"e4bbd41e-cd3c-4d2e-aa8d-f1e474171118\") " pod="openstack/placement-4bbc-account-create-xdw6c" Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.773784 4911 generic.go:334] "Generic (PLEG): container finished" podID="54bedb11-6943-4e34-a221-8dbd2cfd5eee" containerID="6332677c2f853df112183ac5da0e7e95e8fdbc8790b02d3fe1434e560e32033d" exitCode=0 Sep 29 21:41:07 crc kubenswrapper[4911]: I0929 21:41:07.783773 4911 generic.go:334] "Generic (PLEG): container finished" podID="16704d0f-ad69-4cc9-890a-77c268d78151" containerID="337a146ff0ca619afb1efb1546eb8d6c2739a71f7a38410dc78114bda5bfa1ee" exitCode=0 Sep 29 21:41:07 crc 
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.061840 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-33c5-account-create-shdxk"]
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.065199 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4bbc-account-create-xdw6c"
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.127654 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-nm8s4" podUID="755d3290-eae3-4e58-9870-63681ce460d5" containerName="ovn-controller" probeResult="failure" output=<
Sep 29 21:41:08 crc kubenswrapper[4911]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Sep 29 21:41:08 crc kubenswrapper[4911]: >
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.173620 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-xhwxg"
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.183289 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-xhwxg"
Sep 29 21:41:08 crc kubenswrapper[4911]: W0929 21:41:08.321196 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9759766a_e408_4d42_b84d_8acc6a3de4ee.slice/crio-cf361de61b96f2431b181f9d61992f0fd409573414a7511ee6f24feccbaf9fd5 WatchSource:0}: Error finding container cf361de61b96f2431b181f9d61992f0fd409573414a7511ee6f24feccbaf9fd5: Status 404 returned error can't find the container with id cf361de61b96f2431b181f9d61992f0fd409573414a7511ee6f24feccbaf9fd5
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.403490 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-nm8s4-config-hnj4q"]
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.407458 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nm8s4-config-hnj4q"
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.415152 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.433842 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-nm8s4-config-hnj4q"]
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.462488 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/656ced97-6f43-422c-9118-6aa119d0070c-additional-scripts\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q"
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.462570 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8wvr\" (UniqueName: \"kubernetes.io/projected/656ced97-6f43-422c-9118-6aa119d0070c-kube-api-access-j8wvr\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q"
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.462651 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-log-ovn\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q"
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.462679 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/656ced97-6f43-422c-9118-6aa119d0070c-scripts\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q"
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.462716 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-run-ovn\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q"
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.462740 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-run\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q"
Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.475611 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-878rf"
Need to start a new one" pod="openstack/swift-ring-rebalance-878rf" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.565106 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-dispersionconf\") pod \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.565839 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66dda823-f2ce-4a7a-9632-ea5a4022de8d-etc-swift\") pod \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.565953 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmldv\" (UniqueName: \"kubernetes.io/projected/66dda823-f2ce-4a7a-9632-ea5a4022de8d-kube-api-access-bmldv\") pod \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.566184 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66dda823-f2ce-4a7a-9632-ea5a4022de8d-scripts\") pod \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.566734 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-swiftconf\") pod \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.566779 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66dda823-f2ce-4a7a-9632-ea5a4022de8d-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "66dda823-f2ce-4a7a-9632-ea5a4022de8d" (UID: "66dda823-f2ce-4a7a-9632-ea5a4022de8d"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.566815 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-combined-ca-bundle\") pod \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.566974 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66dda823-f2ce-4a7a-9632-ea5a4022de8d-ring-data-devices\") pod \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\" (UID: \"66dda823-f2ce-4a7a-9632-ea5a4022de8d\") " Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.567447 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/656ced97-6f43-422c-9118-6aa119d0070c-additional-scripts\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.567563 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8wvr\" (UniqueName: \"kubernetes.io/projected/656ced97-6f43-422c-9118-6aa119d0070c-kube-api-access-j8wvr\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.567650 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66dda823-f2ce-4a7a-9632-ea5a4022de8d-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "66dda823-f2ce-4a7a-9632-ea5a4022de8d" (UID: "66dda823-f2ce-4a7a-9632-ea5a4022de8d"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.567775 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-log-ovn\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.567823 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/656ced97-6f43-422c-9118-6aa119d0070c-scripts\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.567903 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-run-ovn\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.567941 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-run\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.568047 4911 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66dda823-f2ce-4a7a-9632-ea5a4022de8d-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.568063 4911 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66dda823-f2ce-4a7a-9632-ea5a4022de8d-ring-data-devices\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.568139 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-run\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.568138 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-log-ovn\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.568190 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-run-ovn\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.568317 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/656ced97-6f43-422c-9118-6aa119d0070c-additional-scripts\") pod 
\"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.573358 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/656ced97-6f43-422c-9118-6aa119d0070c-scripts\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.573938 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66dda823-f2ce-4a7a-9632-ea5a4022de8d-kube-api-access-bmldv" (OuterVolumeSpecName: "kube-api-access-bmldv") pod "66dda823-f2ce-4a7a-9632-ea5a4022de8d" (UID: "66dda823-f2ce-4a7a-9632-ea5a4022de8d"). InnerVolumeSpecName "kube-api-access-bmldv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.576824 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "66dda823-f2ce-4a7a-9632-ea5a4022de8d" (UID: "66dda823-f2ce-4a7a-9632-ea5a4022de8d"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.590121 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "66dda823-f2ce-4a7a-9632-ea5a4022de8d" (UID: "66dda823-f2ce-4a7a-9632-ea5a4022de8d"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.590715 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8wvr\" (UniqueName: \"kubernetes.io/projected/656ced97-6f43-422c-9118-6aa119d0070c-kube-api-access-j8wvr\") pod \"ovn-controller-nm8s4-config-hnj4q\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.595779 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66dda823-f2ce-4a7a-9632-ea5a4022de8d" (UID: "66dda823-f2ce-4a7a-9632-ea5a4022de8d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.595974 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66dda823-f2ce-4a7a-9632-ea5a4022de8d-scripts" (OuterVolumeSpecName: "scripts") pod "66dda823-f2ce-4a7a-9632-ea5a4022de8d" (UID: "66dda823-f2ce-4a7a-9632-ea5a4022de8d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.642334 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-4bbc-account-create-xdw6c"] Sep 29 21:41:08 crc kubenswrapper[4911]: W0929 21:41:08.646488 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4bbd41e_cd3c_4d2e_aa8d_f1e474171118.slice/crio-0c919bcf8fa2dc15e75c797bc48a669d7d1bb64fd9918c2a804c540037e86f56 WatchSource:0}: Error finding container 0c919bcf8fa2dc15e75c797bc48a669d7d1bb64fd9918c2a804c540037e86f56: Status 404 returned error can't find the container with id 0c919bcf8fa2dc15e75c797bc48a669d7d1bb64fd9918c2a804c540037e86f56 Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.671595 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66dda823-f2ce-4a7a-9632-ea5a4022de8d-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.671629 4911 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-swiftconf\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.671643 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.671658 4911 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66dda823-f2ce-4a7a-9632-ea5a4022de8d-dispersionconf\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.671670 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmldv\" (UniqueName: \"kubernetes.io/projected/66dda823-f2ce-4a7a-9632-ea5a4022de8d-kube-api-access-bmldv\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.782820 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.795119 4911 generic.go:334] "Generic (PLEG): container finished" podID="9759766a-e408-4d42-b84d-8acc6a3de4ee" containerID="184725e6f5a973a9f3a15441519c0ea6d0a20973970d107a5c3a299e42dfd573" exitCode=0 Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.795184 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-33c5-account-create-shdxk" event={"ID":"9759766a-e408-4d42-b84d-8acc6a3de4ee","Type":"ContainerDied","Data":"184725e6f5a973a9f3a15441519c0ea6d0a20973970d107a5c3a299e42dfd573"} Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.795211 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-33c5-account-create-shdxk" event={"ID":"9759766a-e408-4d42-b84d-8acc6a3de4ee","Type":"ContainerStarted","Data":"cf361de61b96f2431b181f9d61992f0fd409573414a7511ee6f24feccbaf9fd5"} Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.797775 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-878rf" event={"ID":"66dda823-f2ce-4a7a-9632-ea5a4022de8d","Type":"ContainerDied","Data":"83b68dd839a5d87aca43df958cc7cd939dfba565dc2e0892d9891c0acece0d5b"} Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.797827 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83b68dd839a5d87aca43df958cc7cd939dfba565dc2e0892d9891c0acece0d5b" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.797871 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-878rf" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.811982 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"54bedb11-6943-4e34-a221-8dbd2cfd5eee","Type":"ContainerStarted","Data":"4a91ba752e02ad019e74614fab8db0819aae4221643a836bef5f3dd960c39ac3"} Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.821259 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.828545 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"16704d0f-ad69-4cc9-890a-77c268d78151","Type":"ContainerStarted","Data":"69fddeba00ec9a43661c93507c63cebfd0cabbe5003764a2097f0fb5cbe78287"} Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.828756 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.832406 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4bbc-account-create-xdw6c" event={"ID":"e4bbd41e-cd3c-4d2e-aa8d-f1e474171118","Type":"ContainerStarted","Data":"0c919bcf8fa2dc15e75c797bc48a669d7d1bb64fd9918c2a804c540037e86f56"} Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.856028 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=46.601906647 podStartE2EDuration="55.855994s" podCreationTimestamp="2025-09-29 21:40:13 +0000 UTC" firstStartedPulling="2025-09-29 21:40:24.554856317 +0000 UTC m=+902.531968988" lastFinishedPulling="2025-09-29 21:40:33.80894367 +0000 UTC m=+911.786056341" observedRunningTime="2025-09-29 21:41:08.854914627 +0000 UTC m=+946.832027308" watchObservedRunningTime="2025-09-29 21:41:08.855994 +0000 UTC 
m=+946.833106691" Sep 29 21:41:08 crc kubenswrapper[4911]: I0929 21:41:08.886412 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=47.627896265 podStartE2EDuration="56.886390136s" podCreationTimestamp="2025-09-29 21:40:12 +0000 UTC" firstStartedPulling="2025-09-29 21:40:24.559505371 +0000 UTC m=+902.536618042" lastFinishedPulling="2025-09-29 21:40:33.817999242 +0000 UTC m=+911.795111913" observedRunningTime="2025-09-29 21:41:08.882422433 +0000 UTC m=+946.859535114" watchObservedRunningTime="2025-09-29 21:41:08.886390136 +0000 UTC m=+946.863502827" Sep 29 21:41:09 crc kubenswrapper[4911]: I0929 21:41:09.255221 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-nm8s4-config-hnj4q"] Sep 29 21:41:09 crc kubenswrapper[4911]: W0929 21:41:09.259858 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod656ced97_6f43_422c_9118_6aa119d0070c.slice/crio-60a8257f689c1e70552af847c019afa55be01221cc9dc5a1d3f34efcedc6f6fe WatchSource:0}: Error finding container 60a8257f689c1e70552af847c019afa55be01221cc9dc5a1d3f34efcedc6f6fe: Status 404 returned error can't find the container with id 60a8257f689c1e70552af847c019afa55be01221cc9dc5a1d3f34efcedc6f6fe Sep 29 21:41:09 crc kubenswrapper[4911]: I0929 21:41:09.841635 4911 generic.go:334] "Generic (PLEG): container finished" podID="e4bbd41e-cd3c-4d2e-aa8d-f1e474171118" containerID="ba219af358822d0e7bb0b79315aaafe1e25c191120f72f2a4bc3832aaa6e68b8" exitCode=0 Sep 29 21:41:09 crc kubenswrapper[4911]: I0929 21:41:09.841707 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4bbc-account-create-xdw6c" event={"ID":"e4bbd41e-cd3c-4d2e-aa8d-f1e474171118","Type":"ContainerDied","Data":"ba219af358822d0e7bb0b79315aaafe1e25c191120f72f2a4bc3832aaa6e68b8"} Sep 29 21:41:09 crc kubenswrapper[4911]: I0929 21:41:09.843554 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nm8s4-config-hnj4q" event={"ID":"656ced97-6f43-422c-9118-6aa119d0070c","Type":"ContainerStarted","Data":"60a8257f689c1e70552af847c019afa55be01221cc9dc5a1d3f34efcedc6f6fe"} Sep 29 21:41:09 crc kubenswrapper[4911]: I0929 21:41:09.846356 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"91ffcf77dd4a58b463926e3e96f8f5edb0d0f7211176fedda71c50ddeba1bf3b"} Sep 29 21:41:09 crc kubenswrapper[4911]: I0929 21:41:09.846382 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"83f7fabfbe0bc6951c29cfa72d82c1572c340a8e970c45763edf0458e83dde74"} Sep 29 21:41:10 crc kubenswrapper[4911]: I0929 21:41:10.252237 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-33c5-account-create-shdxk" Sep 29 21:41:10 crc kubenswrapper[4911]: I0929 21:41:10.404484 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2rbl\" (UniqueName: \"kubernetes.io/projected/9759766a-e408-4d42-b84d-8acc6a3de4ee-kube-api-access-n2rbl\") pod \"9759766a-e408-4d42-b84d-8acc6a3de4ee\" (UID: \"9759766a-e408-4d42-b84d-8acc6a3de4ee\") " Sep 29 21:41:10 crc kubenswrapper[4911]: I0929 21:41:10.410045 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9759766a-e408-4d42-b84d-8acc6a3de4ee-kube-api-access-n2rbl" (OuterVolumeSpecName: "kube-api-access-n2rbl") pod "9759766a-e408-4d42-b84d-8acc6a3de4ee" (UID: "9759766a-e408-4d42-b84d-8acc6a3de4ee"). InnerVolumeSpecName "kube-api-access-n2rbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:10 crc kubenswrapper[4911]: I0929 21:41:10.506545 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2rbl\" (UniqueName: \"kubernetes.io/projected/9759766a-e408-4d42-b84d-8acc6a3de4ee-kube-api-access-n2rbl\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:10 crc kubenswrapper[4911]: I0929 21:41:10.855939 4911 generic.go:334] "Generic (PLEG): container finished" podID="656ced97-6f43-422c-9118-6aa119d0070c" containerID="584e8c4c181197a4a1582a896356294deda7ef8be81123dfee5064f20966e9ff" exitCode=0 Sep 29 21:41:10 crc kubenswrapper[4911]: I0929 21:41:10.855990 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nm8s4-config-hnj4q" event={"ID":"656ced97-6f43-422c-9118-6aa119d0070c","Type":"ContainerDied","Data":"584e8c4c181197a4a1582a896356294deda7ef8be81123dfee5064f20966e9ff"} Sep 29 21:41:10 crc kubenswrapper[4911]: I0929 21:41:10.860079 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"4272108f52f01a5ce081192a49c6ada168740cebafdc1d32d48a6bf1d04e4e4e"} Sep 29 21:41:10 crc kubenswrapper[4911]: I0929 21:41:10.860265 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"30f1d55c93ff384a04d043a84103ccc543e6dc42580c2867d783ed1e060fcf95"} Sep 29 21:41:10 crc kubenswrapper[4911]: I0929 21:41:10.861968 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-33c5-account-create-shdxk" event={"ID":"9759766a-e408-4d42-b84d-8acc6a3de4ee","Type":"ContainerDied","Data":"cf361de61b96f2431b181f9d61992f0fd409573414a7511ee6f24feccbaf9fd5"} Sep 29 21:41:10 crc kubenswrapper[4911]: I0929 21:41:10.862004 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf361de61b96f2431b181f9d61992f0fd409573414a7511ee6f24feccbaf9fd5" Sep 29 21:41:10 crc kubenswrapper[4911]: I0929 21:41:10.862221 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-33c5-account-create-shdxk" Sep 29 21:41:11 crc kubenswrapper[4911]: I0929 21:41:11.450874 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-4bbc-account-create-xdw6c" Sep 29 21:41:11 crc kubenswrapper[4911]: I0929 21:41:11.522736 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5j6sj\" (UniqueName: \"kubernetes.io/projected/e4bbd41e-cd3c-4d2e-aa8d-f1e474171118-kube-api-access-5j6sj\") pod \"e4bbd41e-cd3c-4d2e-aa8d-f1e474171118\" (UID: \"e4bbd41e-cd3c-4d2e-aa8d-f1e474171118\") " Sep 29 21:41:11 crc kubenswrapper[4911]: I0929 21:41:11.533661 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4bbd41e-cd3c-4d2e-aa8d-f1e474171118-kube-api-access-5j6sj" (OuterVolumeSpecName: "kube-api-access-5j6sj") pod "e4bbd41e-cd3c-4d2e-aa8d-f1e474171118" (UID: "e4bbd41e-cd3c-4d2e-aa8d-f1e474171118"). InnerVolumeSpecName "kube-api-access-5j6sj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:11 crc kubenswrapper[4911]: I0929 21:41:11.624847 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5j6sj\" (UniqueName: \"kubernetes.io/projected/e4bbd41e-cd3c-4d2e-aa8d-f1e474171118-kube-api-access-5j6sj\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:11 crc kubenswrapper[4911]: I0929 21:41:11.876634 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4bbc-account-create-xdw6c" event={"ID":"e4bbd41e-cd3c-4d2e-aa8d-f1e474171118","Type":"ContainerDied","Data":"0c919bcf8fa2dc15e75c797bc48a669d7d1bb64fd9918c2a804c540037e86f56"} Sep 29 21:41:11 crc kubenswrapper[4911]: I0929 21:41:11.876681 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c919bcf8fa2dc15e75c797bc48a669d7d1bb64fd9918c2a804c540037e86f56" Sep 29 21:41:11 crc kubenswrapper[4911]: I0929 21:41:11.876661 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4bbc-account-create-xdw6c" Sep 29 21:41:11 crc kubenswrapper[4911]: I0929 21:41:11.882362 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"c1f2124ea5a469c5630df54a09cce332de0fea4feaa24dc4623887ecffff8536"} Sep 29 21:41:11 crc kubenswrapper[4911]: I0929 21:41:11.882424 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"2fd2e0055b6b19c515411ac10195e37029648824a9d023867e8618fdf77f24c4"} Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.177371 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.254569 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-log-ovn\") pod \"656ced97-6f43-422c-9118-6aa119d0070c\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.254648 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-run\") pod \"656ced97-6f43-422c-9118-6aa119d0070c\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.254713 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8wvr\" (UniqueName: \"kubernetes.io/projected/656ced97-6f43-422c-9118-6aa119d0070c-kube-api-access-j8wvr\") pod \"656ced97-6f43-422c-9118-6aa119d0070c\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.254775 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/656ced97-6f43-422c-9118-6aa119d0070c-scripts\") pod \"656ced97-6f43-422c-9118-6aa119d0070c\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.254944 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/656ced97-6f43-422c-9118-6aa119d0070c-additional-scripts\") pod \"656ced97-6f43-422c-9118-6aa119d0070c\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.254980 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-run-ovn\") pod \"656ced97-6f43-422c-9118-6aa119d0070c\" (UID: \"656ced97-6f43-422c-9118-6aa119d0070c\") " Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.255402 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "656ced97-6f43-422c-9118-6aa119d0070c" (UID: "656ced97-6f43-422c-9118-6aa119d0070c"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.255403 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "656ced97-6f43-422c-9118-6aa119d0070c" (UID: "656ced97-6f43-422c-9118-6aa119d0070c"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.255436 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-run" (OuterVolumeSpecName: "var-run") pod "656ced97-6f43-422c-9118-6aa119d0070c" (UID: "656ced97-6f43-422c-9118-6aa119d0070c"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.256064 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/656ced97-6f43-422c-9118-6aa119d0070c-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "656ced97-6f43-422c-9118-6aa119d0070c" (UID: "656ced97-6f43-422c-9118-6aa119d0070c"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.256311 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/656ced97-6f43-422c-9118-6aa119d0070c-scripts" (OuterVolumeSpecName: "scripts") pod "656ced97-6f43-422c-9118-6aa119d0070c" (UID: "656ced97-6f43-422c-9118-6aa119d0070c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.261415 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/656ced97-6f43-422c-9118-6aa119d0070c-kube-api-access-j8wvr" (OuterVolumeSpecName: "kube-api-access-j8wvr") pod "656ced97-6f43-422c-9118-6aa119d0070c" (UID: "656ced97-6f43-422c-9118-6aa119d0070c"). InnerVolumeSpecName "kube-api-access-j8wvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.356521 4911 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.356555 4911 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-run\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.356576 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8wvr\" (UniqueName: \"kubernetes.io/projected/656ced97-6f43-422c-9118-6aa119d0070c-kube-api-access-j8wvr\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.356586 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/656ced97-6f43-422c-9118-6aa119d0070c-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.356595 4911 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/656ced97-6f43-422c-9118-6aa119d0070c-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.356603 4911 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/656ced97-6f43-422c-9118-6aa119d0070c-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.892616 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nm8s4-config-hnj4q" event={"ID":"656ced97-6f43-422c-9118-6aa119d0070c","Type":"ContainerDied","Data":"60a8257f689c1e70552af847c019afa55be01221cc9dc5a1d3f34efcedc6f6fe"} Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.893162 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60a8257f689c1e70552af847c019afa55be01221cc9dc5a1d3f34efcedc6f6fe" Sep 29 21:41:12 crc 
kubenswrapper[4911]: I0929 21:41:12.892674 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nm8s4-config-hnj4q" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.897743 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"1dd86e82f30dab89a776b6b43a8ccc14dd3c78b44e861a5f793f12bcab31c4a5"} Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.897810 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"6eec964eafd0ba8a54824ff906252f2fba5b38a9811ee3553686f79d4216f773"} Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.979319 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-w6kfd"] Sep 29 21:41:12 crc kubenswrapper[4911]: E0929 21:41:12.979871 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="656ced97-6f43-422c-9118-6aa119d0070c" containerName="ovn-config" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.979894 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="656ced97-6f43-422c-9118-6aa119d0070c" containerName="ovn-config" Sep 29 21:41:12 crc kubenswrapper[4911]: E0929 21:41:12.979919 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dda823-f2ce-4a7a-9632-ea5a4022de8d" containerName="swift-ring-rebalance" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.979926 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dda823-f2ce-4a7a-9632-ea5a4022de8d" containerName="swift-ring-rebalance" Sep 29 21:41:12 crc kubenswrapper[4911]: E0929 21:41:12.979944 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9759766a-e408-4d42-b84d-8acc6a3de4ee" containerName="mariadb-account-create" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.979952 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9759766a-e408-4d42-b84d-8acc6a3de4ee" containerName="mariadb-account-create" Sep 29 21:41:12 crc kubenswrapper[4911]: E0929 21:41:12.979965 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4bbd41e-cd3c-4d2e-aa8d-f1e474171118" containerName="mariadb-account-create" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.979972 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4bbd41e-cd3c-4d2e-aa8d-f1e474171118" containerName="mariadb-account-create" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.980319 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="656ced97-6f43-422c-9118-6aa119d0070c" containerName="ovn-config" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.980389 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4bbd41e-cd3c-4d2e-aa8d-f1e474171118" containerName="mariadb-account-create" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.980422 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9759766a-e408-4d42-b84d-8acc6a3de4ee" containerName="mariadb-account-create" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.980441 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="66dda823-f2ce-4a7a-9632-ea5a4022de8d" containerName="swift-ring-rebalance" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.981192 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.987326 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-lw9bf" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.988541 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 29 21:41:12 crc kubenswrapper[4911]: I0929 21:41:12.994424 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-w6kfd"] Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.069582 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwwzq\" (UniqueName: \"kubernetes.io/projected/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-kube-api-access-xwwzq\") pod \"glance-db-sync-w6kfd\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.069665 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-config-data\") pod \"glance-db-sync-w6kfd\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.069701 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-combined-ca-bundle\") pod \"glance-db-sync-w6kfd\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.069757 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-db-sync-config-data\") pod \"glance-db-sync-w6kfd\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.127570 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-nm8s4" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.171358 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-db-sync-config-data\") pod \"glance-db-sync-w6kfd\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.171442 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwwzq\" (UniqueName: \"kubernetes.io/projected/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-kube-api-access-xwwzq\") pod \"glance-db-sync-w6kfd\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.171486 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-config-data\") pod \"glance-db-sync-w6kfd\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.171516 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-combined-ca-bundle\") pod \"glance-db-sync-w6kfd\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.177914 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-combined-ca-bundle\") pod \"glance-db-sync-w6kfd\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.179159 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-db-sync-config-data\") pod \"glance-db-sync-w6kfd\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.195593 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-config-data\") pod \"glance-db-sync-w6kfd\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.211044 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwwzq\" (UniqueName: \"kubernetes.io/projected/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-kube-api-access-xwwzq\") pod \"glance-db-sync-w6kfd\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.280436 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-nm8s4-config-hnj4q"] Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.291508 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-nm8s4-config-hnj4q"] Sep 29 21:41:13 crc kubenswrapper[4911]: I0929 21:41:13.302859 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:14 crc kubenswrapper[4911]: I0929 21:41:14.715374 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="656ced97-6f43-422c-9118-6aa119d0070c" path="/var/lib/kubelet/pods/656ced97-6f43-422c-9118-6aa119d0070c/volumes" Sep 29 21:41:15 crc kubenswrapper[4911]: I0929 21:41:15.508439 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-w6kfd"] Sep 29 21:41:15 crc kubenswrapper[4911]: W0929 21:41:15.513806 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e8211a0_b5a5_4dd2_8b18_6616202ebe45.slice/crio-0d928d8b55dce8206c17b825e9482248b34c876f6a4b29932eb32690272571cf WatchSource:0}: Error finding container 0d928d8b55dce8206c17b825e9482248b34c876f6a4b29932eb32690272571cf: Status 404 returned error can't find the container with id 0d928d8b55dce8206c17b825e9482248b34c876f6a4b29932eb32690272571cf Sep 29 21:41:15 crc kubenswrapper[4911]: I0929 21:41:15.927717 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"ec04c9837df25748e1486fc58d9f5f48b96998af9a83ccb5506385099c17b3e2"} Sep 29 21:41:15 crc kubenswrapper[4911]: I0929 21:41:15.928080 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"e99fd8608bdf72166e2bccd2bc07377152bbf86c7fce3152fab96158a04835d8"} Sep 29 21:41:15 crc kubenswrapper[4911]: I0929 21:41:15.928094 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"f7df28c39b01933d2ef0b9d06663dc611d3205b743dace0d4abbc35d71eb4789"} Sep 29 21:41:15 crc kubenswrapper[4911]: I0929 21:41:15.928104 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"8997d798e21efa62f1bdbafed3ff1746d0326f51f6a1d5e041e076ad01b97da0"} Sep 29 21:41:15 crc kubenswrapper[4911]: I0929 21:41:15.928115 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"278d72f3fee96534a30d70ec8ba1e30b59088ed9d3a4231c3e54c6423e276bff"} Sep 29 21:41:15 crc kubenswrapper[4911]: I0929 21:41:15.929390 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-w6kfd" event={"ID":"3e8211a0-b5a5-4dd2-8b18-6616202ebe45","Type":"ContainerStarted","Data":"0d928d8b55dce8206c17b825e9482248b34c876f6a4b29932eb32690272571cf"} Sep 29 21:41:16 crc kubenswrapper[4911]: I0929 21:41:16.979947 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"d3f27f759e630508263a2a4c1804a4ba3035a31cade4029d87a1c38c735d6a69"} Sep 29 21:41:16 crc kubenswrapper[4911]: I0929 21:41:16.979991 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"00a836d8-70df-4583-948f-e6869d77e432","Type":"ContainerStarted","Data":"25059d01cd92f809f16b8f2f3a73862286a61aa9862d66a51270ed3881e92401"} Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.019112 4911 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/swift-storage-0" podStartSLOduration=20.086654016 podStartE2EDuration="28.019089974s" podCreationTimestamp="2025-09-29 21:40:49 +0000 UTC" firstStartedPulling="2025-09-29 21:41:07.038537202 +0000 UTC m=+945.015649883" lastFinishedPulling="2025-09-29 21:41:14.97097317 +0000 UTC m=+952.948085841" observedRunningTime="2025-09-29 21:41:17.006897705 +0000 UTC m=+954.984010396" watchObservedRunningTime="2025-09-29 21:41:17.019089974 +0000 UTC m=+954.996202655" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.270782 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-8k779"] Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.273140 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.275212 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.285282 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-8k779"] Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.373739 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.373909 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-config\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.373936 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.373975 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f59hd\" (UniqueName: \"kubernetes.io/projected/b9c4944f-6978-483c-a49c-5e0dc98ca4af-kube-api-access-f59hd\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.374007 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.374070 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: 
\"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.475991 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-config\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.478445 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.478487 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f59hd\" (UniqueName: \"kubernetes.io/projected/b9c4944f-6978-483c-a49c-5e0dc98ca4af-kube-api-access-f59hd\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.478523 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.478573 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.478617 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.477185 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-config\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.479958 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.480102 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" 
Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.480565 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.481330 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.497945 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f59hd\" (UniqueName: \"kubernetes.io/projected/b9c4944f-6978-483c-a49c-5e0dc98ca4af-kube-api-access-f59hd\") pod \"dnsmasq-dns-77585f5f8c-8k779\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:17 crc kubenswrapper[4911]: I0929 21:41:17.591914 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:18 crc kubenswrapper[4911]: I0929 21:41:18.023373 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-8k779"] Sep 29 21:41:18 crc kubenswrapper[4911]: W0929 21:41:18.023836 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9c4944f_6978_483c_a49c_5e0dc98ca4af.slice/crio-898b8200a2af3a45dfcfd4f34021ee799d9eb67b3eddfd4cc244f0f66559517d WatchSource:0}: Error finding container 898b8200a2af3a45dfcfd4f34021ee799d9eb67b3eddfd4cc244f0f66559517d: Status 404 returned error can't find the container with id 898b8200a2af3a45dfcfd4f34021ee799d9eb67b3eddfd4cc244f0f66559517d Sep 29 21:41:19 crc kubenswrapper[4911]: I0929 21:41:19.004393 4911 generic.go:334] "Generic (PLEG): container finished" podID="b9c4944f-6978-483c-a49c-5e0dc98ca4af" containerID="380a388c68c9704fb9f5b6f4f56b098df39fa3c57273adbc8f71b7202e793c9b" exitCode=0 Sep 29 21:41:19 crc kubenswrapper[4911]: I0929 21:41:19.004484 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-8k779" event={"ID":"b9c4944f-6978-483c-a49c-5e0dc98ca4af","Type":"ContainerDied","Data":"380a388c68c9704fb9f5b6f4f56b098df39fa3c57273adbc8f71b7202e793c9b"} Sep 29 21:41:19 crc kubenswrapper[4911]: I0929 21:41:19.004769 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-8k779" event={"ID":"b9c4944f-6978-483c-a49c-5e0dc98ca4af","Type":"ContainerStarted","Data":"898b8200a2af3a45dfcfd4f34021ee799d9eb67b3eddfd4cc244f0f66559517d"} Sep 29 21:41:20 crc kubenswrapper[4911]: I0929 21:41:20.022175 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-8k779" event={"ID":"b9c4944f-6978-483c-a49c-5e0dc98ca4af","Type":"ContainerStarted","Data":"878645bc3d2a6004bc5236fab6bfaa6a001dd82afcd3aeb277806c9ff8373a68"} Sep 29 21:41:20 crc kubenswrapper[4911]: I0929 21:41:20.022588 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:20 crc kubenswrapper[4911]: I0929 21:41:20.063967 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/dnsmasq-dns-77585f5f8c-8k779" podStartSLOduration=3.063936306 podStartE2EDuration="3.063936306s" podCreationTimestamp="2025-09-29 21:41:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:41:20.046306477 +0000 UTC m=+958.023419158" watchObservedRunningTime="2025-09-29 21:41:20.063936306 +0000 UTC m=+958.041049017" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.257930 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.506993 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.586410 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-652pw"] Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.587878 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-652pw" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.621551 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-652pw"] Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.688061 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdrt6\" (UniqueName: \"kubernetes.io/projected/c4176d75-f7e0-4327-97cc-f1d89925650f-kube-api-access-gdrt6\") pod \"cinder-db-create-652pw\" (UID: \"c4176d75-f7e0-4327-97cc-f1d89925650f\") " pod="openstack/cinder-db-create-652pw" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.688996 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-zvhtr"] Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.689977 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-zvhtr" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.732241 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-zvhtr"] Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.789633 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdrt6\" (UniqueName: \"kubernetes.io/projected/c4176d75-f7e0-4327-97cc-f1d89925650f-kube-api-access-gdrt6\") pod \"cinder-db-create-652pw\" (UID: \"c4176d75-f7e0-4327-97cc-f1d89925650f\") " pod="openstack/cinder-db-create-652pw" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.789886 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmqmb\" (UniqueName: \"kubernetes.io/projected/5bae9375-72d0-4d80-a85b-00e594f08ec5-kube-api-access-kmqmb\") pod \"barbican-db-create-zvhtr\" (UID: \"5bae9375-72d0-4d80-a85b-00e594f08ec5\") " pod="openstack/barbican-db-create-zvhtr" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.829633 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdrt6\" (UniqueName: \"kubernetes.io/projected/c4176d75-f7e0-4327-97cc-f1d89925650f-kube-api-access-gdrt6\") pod \"cinder-db-create-652pw\" (UID: \"c4176d75-f7e0-4327-97cc-f1d89925650f\") " pod="openstack/cinder-db-create-652pw" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.889055 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-qs6d4"] Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.890351 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qs6d4" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.891094 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmqmb\" (UniqueName: \"kubernetes.io/projected/5bae9375-72d0-4d80-a85b-00e594f08ec5-kube-api-access-kmqmb\") pod \"barbican-db-create-zvhtr\" (UID: \"5bae9375-72d0-4d80-a85b-00e594f08ec5\") " pod="openstack/barbican-db-create-zvhtr" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.907467 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qs6d4"] Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.917276 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-652pw" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.921728 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmqmb\" (UniqueName: \"kubernetes.io/projected/5bae9375-72d0-4d80-a85b-00e594f08ec5-kube-api-access-kmqmb\") pod \"barbican-db-create-zvhtr\" (UID: \"5bae9375-72d0-4d80-a85b-00e594f08ec5\") " pod="openstack/barbican-db-create-zvhtr" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.957549 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-wfp9c"] Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.958666 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.964931 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-wfp9c"] Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.964967 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.964967 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.964994 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.965110 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-bg252" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.998177 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94e08fd-5765-4631-8e54-ec22daab0ca1-combined-ca-bundle\") pod \"keystone-db-sync-wfp9c\" (UID: \"a94e08fd-5765-4631-8e54-ec22daab0ca1\") " pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.998235 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5vn7\" (UniqueName: \"kubernetes.io/projected/a94e08fd-5765-4631-8e54-ec22daab0ca1-kube-api-access-c5vn7\") pod \"keystone-db-sync-wfp9c\" (UID: \"a94e08fd-5765-4631-8e54-ec22daab0ca1\") " pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.998262 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a94e08fd-5765-4631-8e54-ec22daab0ca1-config-data\") pod \"keystone-db-sync-wfp9c\" (UID: \"a94e08fd-5765-4631-8e54-ec22daab0ca1\") " pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:24 crc kubenswrapper[4911]: I0929 21:41:24.998287 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vncg7\" (UniqueName: \"kubernetes.io/projected/15d8f947-4b2f-4605-a19a-2908a833d854-kube-api-access-vncg7\") pod \"neutron-db-create-qs6d4\" (UID: \"15d8f947-4b2f-4605-a19a-2908a833d854\") " pod="openstack/neutron-db-create-qs6d4" Sep 29 21:41:25 crc kubenswrapper[4911]: I0929 21:41:25.034390 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-zvhtr" Sep 29 21:41:25 crc kubenswrapper[4911]: I0929 21:41:25.100055 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5vn7\" (UniqueName: \"kubernetes.io/projected/a94e08fd-5765-4631-8e54-ec22daab0ca1-kube-api-access-c5vn7\") pod \"keystone-db-sync-wfp9c\" (UID: \"a94e08fd-5765-4631-8e54-ec22daab0ca1\") " pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:25 crc kubenswrapper[4911]: I0929 21:41:25.100119 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a94e08fd-5765-4631-8e54-ec22daab0ca1-config-data\") pod \"keystone-db-sync-wfp9c\" (UID: \"a94e08fd-5765-4631-8e54-ec22daab0ca1\") " pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:25 crc kubenswrapper[4911]: I0929 21:41:25.100161 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vncg7\" (UniqueName: \"kubernetes.io/projected/15d8f947-4b2f-4605-a19a-2908a833d854-kube-api-access-vncg7\") pod \"neutron-db-create-qs6d4\" (UID: \"15d8f947-4b2f-4605-a19a-2908a833d854\") " pod="openstack/neutron-db-create-qs6d4" Sep 29 21:41:25 crc kubenswrapper[4911]: I0929 21:41:25.100272 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94e08fd-5765-4631-8e54-ec22daab0ca1-combined-ca-bundle\") pod \"keystone-db-sync-wfp9c\" (UID: \"a94e08fd-5765-4631-8e54-ec22daab0ca1\") " pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:25 crc kubenswrapper[4911]: I0929 21:41:25.110938 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94e08fd-5765-4631-8e54-ec22daab0ca1-combined-ca-bundle\") pod \"keystone-db-sync-wfp9c\" (UID: \"a94e08fd-5765-4631-8e54-ec22daab0ca1\") " pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:25 crc kubenswrapper[4911]: I0929 21:41:25.112587 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a94e08fd-5765-4631-8e54-ec22daab0ca1-config-data\") pod \"keystone-db-sync-wfp9c\" (UID: \"a94e08fd-5765-4631-8e54-ec22daab0ca1\") " pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:25 crc kubenswrapper[4911]: I0929 21:41:25.122667 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5vn7\" (UniqueName: \"kubernetes.io/projected/a94e08fd-5765-4631-8e54-ec22daab0ca1-kube-api-access-c5vn7\") pod \"keystone-db-sync-wfp9c\" (UID: \"a94e08fd-5765-4631-8e54-ec22daab0ca1\") " pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:25 crc kubenswrapper[4911]: I0929 21:41:25.123494 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vncg7\" (UniqueName: \"kubernetes.io/projected/15d8f947-4b2f-4605-a19a-2908a833d854-kube-api-access-vncg7\") pod \"neutron-db-create-qs6d4\" (UID: \"15d8f947-4b2f-4605-a19a-2908a833d854\") " pod="openstack/neutron-db-create-qs6d4" Sep 29 21:41:25 crc kubenswrapper[4911]: I0929 21:41:25.208185 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qs6d4" Sep 29 21:41:25 crc kubenswrapper[4911]: I0929 21:41:25.274899 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:27 crc kubenswrapper[4911]: I0929 21:41:27.593960 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:27 crc kubenswrapper[4911]: I0929 21:41:27.679040 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-m6jw6"] Sep 29 21:41:27 crc kubenswrapper[4911]: I0929 21:41:27.679283 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-m6jw6" podUID="f88b6474-345c-4c65-9455-10feb2d34fd3" containerName="dnsmasq-dns" containerID="cri-o://c2e3e297621597cd32f423865481dee395d79358feb30b394cd97f6e37a0357f" gracePeriod=10 Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.108678 4911 generic.go:334] "Generic (PLEG): container finished" podID="f88b6474-345c-4c65-9455-10feb2d34fd3" containerID="c2e3e297621597cd32f423865481dee395d79358feb30b394cd97f6e37a0357f" exitCode=0 Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.108808 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-m6jw6" event={"ID":"f88b6474-345c-4c65-9455-10feb2d34fd3","Type":"ContainerDied","Data":"c2e3e297621597cd32f423865481dee395d79358feb30b394cd97f6e37a0357f"} Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.145548 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.259432 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-config\") pod \"f88b6474-345c-4c65-9455-10feb2d34fd3\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.259538 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-ovsdbserver-sb\") pod \"f88b6474-345c-4c65-9455-10feb2d34fd3\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.259559 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-dns-svc\") pod \"f88b6474-345c-4c65-9455-10feb2d34fd3\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.259582 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pqp6\" (UniqueName: \"kubernetes.io/projected/f88b6474-345c-4c65-9455-10feb2d34fd3-kube-api-access-5pqp6\") pod \"f88b6474-345c-4c65-9455-10feb2d34fd3\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.259664 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-ovsdbserver-nb\") pod \"f88b6474-345c-4c65-9455-10feb2d34fd3\" (UID: \"f88b6474-345c-4c65-9455-10feb2d34fd3\") " Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.266731 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88b6474-345c-4c65-9455-10feb2d34fd3-kube-api-access-5pqp6" (OuterVolumeSpecName: "kube-api-access-5pqp6") 
pod "f88b6474-345c-4c65-9455-10feb2d34fd3" (UID: "f88b6474-345c-4c65-9455-10feb2d34fd3"). InnerVolumeSpecName "kube-api-access-5pqp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.306195 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f88b6474-345c-4c65-9455-10feb2d34fd3" (UID: "f88b6474-345c-4c65-9455-10feb2d34fd3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.309155 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f88b6474-345c-4c65-9455-10feb2d34fd3" (UID: "f88b6474-345c-4c65-9455-10feb2d34fd3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.311556 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f88b6474-345c-4c65-9455-10feb2d34fd3" (UID: "f88b6474-345c-4c65-9455-10feb2d34fd3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.311620 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-config" (OuterVolumeSpecName: "config") pod "f88b6474-345c-4c65-9455-10feb2d34fd3" (UID: "f88b6474-345c-4c65-9455-10feb2d34fd3"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.361036 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.361064 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.361074 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.361083 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pqp6\" (UniqueName: \"kubernetes.io/projected/f88b6474-345c-4c65-9455-10feb2d34fd3-kube-api-access-5pqp6\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.361093 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88b6474-345c-4c65-9455-10feb2d34fd3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.433966 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-wfp9c"] Sep 29 21:41:28 crc kubenswrapper[4911]: W0929 21:41:28.446212 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda94e08fd_5765_4631_8e54_ec22daab0ca1.slice/crio-65dd8d0a78695736c5b436f7e26d369db6ff04ff1496c7efc2e0b58e78a16062 WatchSource:0}: Error finding container 65dd8d0a78695736c5b436f7e26d369db6ff04ff1496c7efc2e0b58e78a16062: Status 404 returned error can't find the container with id 65dd8d0a78695736c5b436f7e26d369db6ff04ff1496c7efc2e0b58e78a16062 Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.493578 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-652pw"] Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.502122 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qs6d4"] Sep 29 21:41:28 crc kubenswrapper[4911]: W0929 21:41:28.506326 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4176d75_f7e0_4327_97cc_f1d89925650f.slice/crio-467fedfff085da23398e852612507d1efbd017dc58c3d38abdffeef41df8423a WatchSource:0}: Error finding container 467fedfff085da23398e852612507d1efbd017dc58c3d38abdffeef41df8423a: Status 404 returned error can't find the container with id 467fedfff085da23398e852612507d1efbd017dc58c3d38abdffeef41df8423a Sep 29 21:41:28 crc kubenswrapper[4911]: I0929 21:41:28.523932 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-zvhtr"] Sep 29 21:41:28 crc kubenswrapper[4911]: W0929 21:41:28.547834 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bae9375_72d0_4d80_a85b_00e594f08ec5.slice/crio-6b9eeba6d5166bd1bcd8ce2a8e6268f95af32ef5a5cfb888d5f6abaf29d4b779 WatchSource:0}: Error finding container 6b9eeba6d5166bd1bcd8ce2a8e6268f95af32ef5a5cfb888d5f6abaf29d4b779: Status 404 returned error can't find the 
container with id 6b9eeba6d5166bd1bcd8ce2a8e6268f95af32ef5a5cfb888d5f6abaf29d4b779 Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.118447 4911 generic.go:334] "Generic (PLEG): container finished" podID="5bae9375-72d0-4d80-a85b-00e594f08ec5" containerID="b63faa3e3e537bb1eaa6a56fa565b1e4d107c246b5dab88a7cd4375eafb3270c" exitCode=0 Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.118627 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-zvhtr" event={"ID":"5bae9375-72d0-4d80-a85b-00e594f08ec5","Type":"ContainerDied","Data":"b63faa3e3e537bb1eaa6a56fa565b1e4d107c246b5dab88a7cd4375eafb3270c"} Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.119008 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-zvhtr" event={"ID":"5bae9375-72d0-4d80-a85b-00e594f08ec5","Type":"ContainerStarted","Data":"6b9eeba6d5166bd1bcd8ce2a8e6268f95af32ef5a5cfb888d5f6abaf29d4b779"} Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.121357 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-w6kfd" event={"ID":"3e8211a0-b5a5-4dd2-8b18-6616202ebe45","Type":"ContainerStarted","Data":"288c6d420e2975a9b261dcf8c4d8f3f77ab2873dee67bbf7f5c807c9dd6be63c"} Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.123658 4911 generic.go:334] "Generic (PLEG): container finished" podID="c4176d75-f7e0-4327-97cc-f1d89925650f" containerID="378795debcf63ff3b42f8b80f1ae7e232a330f411c1ccff346293a475fb76741" exitCode=0 Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.123730 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-652pw" event={"ID":"c4176d75-f7e0-4327-97cc-f1d89925650f","Type":"ContainerDied","Data":"378795debcf63ff3b42f8b80f1ae7e232a330f411c1ccff346293a475fb76741"} Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.123815 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-652pw" event={"ID":"c4176d75-f7e0-4327-97cc-f1d89925650f","Type":"ContainerStarted","Data":"467fedfff085da23398e852612507d1efbd017dc58c3d38abdffeef41df8423a"} Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.129133 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-m6jw6" event={"ID":"f88b6474-345c-4c65-9455-10feb2d34fd3","Type":"ContainerDied","Data":"f4bbc103efe55cb97996a58b0f1d2f364b9d56fed9e32a89246145a88df5c904"} Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.129186 4911 scope.go:117] "RemoveContainer" containerID="c2e3e297621597cd32f423865481dee395d79358feb30b394cd97f6e37a0357f" Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.129186 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-m6jw6" Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.134628 4911 generic.go:334] "Generic (PLEG): container finished" podID="15d8f947-4b2f-4605-a19a-2908a833d854" containerID="ee1bcd6779a9cc37261fd73aff389d679acb273ed48d8724fb8a3d7428dffff6" exitCode=0 Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.134775 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qs6d4" event={"ID":"15d8f947-4b2f-4605-a19a-2908a833d854","Type":"ContainerDied","Data":"ee1bcd6779a9cc37261fd73aff389d679acb273ed48d8724fb8a3d7428dffff6"} Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.134815 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qs6d4" event={"ID":"15d8f947-4b2f-4605-a19a-2908a833d854","Type":"ContainerStarted","Data":"3df1d2be2e0421f8844b523f23610ee7e9fee39462eb5d954e079868cca9d56a"} Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.136511 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-wfp9c" event={"ID":"a94e08fd-5765-4631-8e54-ec22daab0ca1","Type":"ContainerStarted","Data":"65dd8d0a78695736c5b436f7e26d369db6ff04ff1496c7efc2e0b58e78a16062"} Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.152219 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-w6kfd" podStartSLOduration=4.767043614 podStartE2EDuration="17.15219886s" podCreationTimestamp="2025-09-29 21:41:12 +0000 UTC" firstStartedPulling="2025-09-29 21:41:15.51646977 +0000 UTC m=+953.493582441" lastFinishedPulling="2025-09-29 21:41:27.901625016 +0000 UTC m=+965.878737687" observedRunningTime="2025-09-29 21:41:29.147712061 +0000 UTC m=+967.124824772" watchObservedRunningTime="2025-09-29 21:41:29.15219886 +0000 UTC m=+967.129311531" Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.177962 4911 scope.go:117] "RemoveContainer" containerID="bb5625680b66758b8e48e47e158401bae96bcbc9d6f4218abe4fe5c9c98c7df0" Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.202698 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-m6jw6"] Sep 29 21:41:29 crc kubenswrapper[4911]: I0929 21:41:29.210505 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-m6jw6"] Sep 29 21:41:30 crc kubenswrapper[4911]: I0929 21:41:30.712565 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88b6474-345c-4c65-9455-10feb2d34fd3" path="/var/lib/kubelet/pods/f88b6474-345c-4c65-9455-10feb2d34fd3/volumes" Sep 29 21:41:32 crc kubenswrapper[4911]: I0929 21:41:32.862214 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-zvhtr" Sep 29 21:41:32 crc kubenswrapper[4911]: I0929 21:41:32.868932 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-652pw" Sep 29 21:41:32 crc kubenswrapper[4911]: I0929 21:41:32.891850 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-qs6d4" Sep 29 21:41:32 crc kubenswrapper[4911]: I0929 21:41:32.951726 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vncg7\" (UniqueName: \"kubernetes.io/projected/15d8f947-4b2f-4605-a19a-2908a833d854-kube-api-access-vncg7\") pod \"15d8f947-4b2f-4605-a19a-2908a833d854\" (UID: \"15d8f947-4b2f-4605-a19a-2908a833d854\") " Sep 29 21:41:32 crc kubenswrapper[4911]: I0929 21:41:32.951764 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmqmb\" (UniqueName: \"kubernetes.io/projected/5bae9375-72d0-4d80-a85b-00e594f08ec5-kube-api-access-kmqmb\") pod \"5bae9375-72d0-4d80-a85b-00e594f08ec5\" (UID: \"5bae9375-72d0-4d80-a85b-00e594f08ec5\") " Sep 29 21:41:32 crc kubenswrapper[4911]: I0929 21:41:32.951817 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdrt6\" (UniqueName: \"kubernetes.io/projected/c4176d75-f7e0-4327-97cc-f1d89925650f-kube-api-access-gdrt6\") pod \"c4176d75-f7e0-4327-97cc-f1d89925650f\" (UID: \"c4176d75-f7e0-4327-97cc-f1d89925650f\") " Sep 29 21:41:32 crc kubenswrapper[4911]: I0929 21:41:32.956014 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4176d75-f7e0-4327-97cc-f1d89925650f-kube-api-access-gdrt6" (OuterVolumeSpecName: "kube-api-access-gdrt6") pod "c4176d75-f7e0-4327-97cc-f1d89925650f" (UID: "c4176d75-f7e0-4327-97cc-f1d89925650f"). InnerVolumeSpecName "kube-api-access-gdrt6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:32 crc kubenswrapper[4911]: I0929 21:41:32.956527 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15d8f947-4b2f-4605-a19a-2908a833d854-kube-api-access-vncg7" (OuterVolumeSpecName: "kube-api-access-vncg7") pod "15d8f947-4b2f-4605-a19a-2908a833d854" (UID: "15d8f947-4b2f-4605-a19a-2908a833d854"). InnerVolumeSpecName "kube-api-access-vncg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:32 crc kubenswrapper[4911]: I0929 21:41:32.958188 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bae9375-72d0-4d80-a85b-00e594f08ec5-kube-api-access-kmqmb" (OuterVolumeSpecName: "kube-api-access-kmqmb") pod "5bae9375-72d0-4d80-a85b-00e594f08ec5" (UID: "5bae9375-72d0-4d80-a85b-00e594f08ec5"). InnerVolumeSpecName "kube-api-access-kmqmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.053616 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vncg7\" (UniqueName: \"kubernetes.io/projected/15d8f947-4b2f-4605-a19a-2908a833d854-kube-api-access-vncg7\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.053671 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmqmb\" (UniqueName: \"kubernetes.io/projected/5bae9375-72d0-4d80-a85b-00e594f08ec5-kube-api-access-kmqmb\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.053690 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdrt6\" (UniqueName: \"kubernetes.io/projected/c4176d75-f7e0-4327-97cc-f1d89925650f-kube-api-access-gdrt6\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.179679 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-652pw" Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.187698 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-652pw" event={"ID":"c4176d75-f7e0-4327-97cc-f1d89925650f","Type":"ContainerDied","Data":"467fedfff085da23398e852612507d1efbd017dc58c3d38abdffeef41df8423a"} Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.187771 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="467fedfff085da23398e852612507d1efbd017dc58c3d38abdffeef41df8423a" Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.193288 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qs6d4" event={"ID":"15d8f947-4b2f-4605-a19a-2908a833d854","Type":"ContainerDied","Data":"3df1d2be2e0421f8844b523f23610ee7e9fee39462eb5d954e079868cca9d56a"} Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.193496 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3df1d2be2e0421f8844b523f23610ee7e9fee39462eb5d954e079868cca9d56a" Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.193303 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qs6d4" Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.195295 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-wfp9c" event={"ID":"a94e08fd-5765-4631-8e54-ec22daab0ca1","Type":"ContainerStarted","Data":"7c70bc3366af620d994b0869bda26a39f3a4f027e7aab1efb4bbfeaeadb8f99b"} Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.197288 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-zvhtr" event={"ID":"5bae9375-72d0-4d80-a85b-00e594f08ec5","Type":"ContainerDied","Data":"6b9eeba6d5166bd1bcd8ce2a8e6268f95af32ef5a5cfb888d5f6abaf29d4b779"} Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.197312 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b9eeba6d5166bd1bcd8ce2a8e6268f95af32ef5a5cfb888d5f6abaf29d4b779" Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.197341 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-zvhtr" Sep 29 21:41:33 crc kubenswrapper[4911]: I0929 21:41:33.214394 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-wfp9c" podStartSLOduration=4.940710611 podStartE2EDuration="9.21437982s" podCreationTimestamp="2025-09-29 21:41:24 +0000 UTC" firstStartedPulling="2025-09-29 21:41:28.451687568 +0000 UTC m=+966.428800229" lastFinishedPulling="2025-09-29 21:41:32.725356747 +0000 UTC m=+970.702469438" observedRunningTime="2025-09-29 21:41:33.209190409 +0000 UTC m=+971.186303090" watchObservedRunningTime="2025-09-29 21:41:33.21437982 +0000 UTC m=+971.191492481" Sep 29 21:41:35 crc kubenswrapper[4911]: I0929 21:41:35.219489 4911 generic.go:334] "Generic (PLEG): container finished" podID="3e8211a0-b5a5-4dd2-8b18-6616202ebe45" containerID="288c6d420e2975a9b261dcf8c4d8f3f77ab2873dee67bbf7f5c807c9dd6be63c" exitCode=0 Sep 29 21:41:35 crc kubenswrapper[4911]: I0929 21:41:35.219617 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-w6kfd" event={"ID":"3e8211a0-b5a5-4dd2-8b18-6616202ebe45","Type":"ContainerDied","Data":"288c6d420e2975a9b261dcf8c4d8f3f77ab2873dee67bbf7f5c807c9dd6be63c"} Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.239554 4911 generic.go:334] "Generic (PLEG): container finished" podID="a94e08fd-5765-4631-8e54-ec22daab0ca1" containerID="7c70bc3366af620d994b0869bda26a39f3a4f027e7aab1efb4bbfeaeadb8f99b" exitCode=0 Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.239704 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-wfp9c" event={"ID":"a94e08fd-5765-4631-8e54-ec22daab0ca1","Type":"ContainerDied","Data":"7c70bc3366af620d994b0869bda26a39f3a4f027e7aab1efb4bbfeaeadb8f99b"} Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.660451 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.822997 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-config-data\") pod \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.823074 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-combined-ca-bundle\") pod \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.823122 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwwzq\" (UniqueName: \"kubernetes.io/projected/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-kube-api-access-xwwzq\") pod \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.823306 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-db-sync-config-data\") pod \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\" (UID: \"3e8211a0-b5a5-4dd2-8b18-6616202ebe45\") " Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.846722 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "3e8211a0-b5a5-4dd2-8b18-6616202ebe45" (UID: "3e8211a0-b5a5-4dd2-8b18-6616202ebe45"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.846925 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-kube-api-access-xwwzq" (OuterVolumeSpecName: "kube-api-access-xwwzq") pod "3e8211a0-b5a5-4dd2-8b18-6616202ebe45" (UID: "3e8211a0-b5a5-4dd2-8b18-6616202ebe45"). InnerVolumeSpecName "kube-api-access-xwwzq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.858429 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e8211a0-b5a5-4dd2-8b18-6616202ebe45" (UID: "3e8211a0-b5a5-4dd2-8b18-6616202ebe45"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.896608 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-config-data" (OuterVolumeSpecName: "config-data") pod "3e8211a0-b5a5-4dd2-8b18-6616202ebe45" (UID: "3e8211a0-b5a5-4dd2-8b18-6616202ebe45"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.927761 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwwzq\" (UniqueName: \"kubernetes.io/projected/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-kube-api-access-xwwzq\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.927818 4911 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.927832 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:36 crc kubenswrapper[4911]: I0929 21:41:36.927844 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e8211a0-b5a5-4dd2-8b18-6616202ebe45-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.251156 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-w6kfd" event={"ID":"3e8211a0-b5a5-4dd2-8b18-6616202ebe45","Type":"ContainerDied","Data":"0d928d8b55dce8206c17b825e9482248b34c876f6a4b29932eb32690272571cf"} Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.251231 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d928d8b55dce8206c17b825e9482248b34c876f6a4b29932eb32690272571cf" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.251189 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-w6kfd" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.511398 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.640506 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94e08fd-5765-4631-8e54-ec22daab0ca1-combined-ca-bundle\") pod \"a94e08fd-5765-4631-8e54-ec22daab0ca1\" (UID: \"a94e08fd-5765-4631-8e54-ec22daab0ca1\") " Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.640899 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a94e08fd-5765-4631-8e54-ec22daab0ca1-config-data\") pod \"a94e08fd-5765-4631-8e54-ec22daab0ca1\" (UID: \"a94e08fd-5765-4631-8e54-ec22daab0ca1\") " Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.640954 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5vn7\" (UniqueName: \"kubernetes.io/projected/a94e08fd-5765-4631-8e54-ec22daab0ca1-kube-api-access-c5vn7\") pod \"a94e08fd-5765-4631-8e54-ec22daab0ca1\" (UID: \"a94e08fd-5765-4631-8e54-ec22daab0ca1\") " Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.645609 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a94e08fd-5765-4631-8e54-ec22daab0ca1-kube-api-access-c5vn7" (OuterVolumeSpecName: "kube-api-access-c5vn7") pod "a94e08fd-5765-4631-8e54-ec22daab0ca1" (UID: "a94e08fd-5765-4631-8e54-ec22daab0ca1"). InnerVolumeSpecName "kube-api-access-c5vn7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.714978 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a94e08fd-5765-4631-8e54-ec22daab0ca1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a94e08fd-5765-4631-8e54-ec22daab0ca1" (UID: "a94e08fd-5765-4631-8e54-ec22daab0ca1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.736868 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a94e08fd-5765-4631-8e54-ec22daab0ca1-config-data" (OuterVolumeSpecName: "config-data") pod "a94e08fd-5765-4631-8e54-ec22daab0ca1" (UID: "a94e08fd-5765-4631-8e54-ec22daab0ca1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.742625 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a94e08fd-5765-4631-8e54-ec22daab0ca1-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.742691 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5vn7\" (UniqueName: \"kubernetes.io/projected/a94e08fd-5765-4631-8e54-ec22daab0ca1-kube-api-access-c5vn7\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.742882 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94e08fd-5765-4631-8e54-ec22daab0ca1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809177 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-kj4rt"] Sep 29 21:41:37 crc kubenswrapper[4911]: E0929 21:41:37.809556 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bae9375-72d0-4d80-a85b-00e594f08ec5" containerName="mariadb-database-create" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809575 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bae9375-72d0-4d80-a85b-00e594f08ec5" containerName="mariadb-database-create" Sep 29 21:41:37 crc kubenswrapper[4911]: E0929 21:41:37.809591 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e8211a0-b5a5-4dd2-8b18-6616202ebe45" containerName="glance-db-sync" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809602 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e8211a0-b5a5-4dd2-8b18-6616202ebe45" containerName="glance-db-sync" Sep 29 21:41:37 crc kubenswrapper[4911]: E0929 21:41:37.809610 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4176d75-f7e0-4327-97cc-f1d89925650f" containerName="mariadb-database-create" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809617 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4176d75-f7e0-4327-97cc-f1d89925650f" containerName="mariadb-database-create" Sep 29 21:41:37 crc kubenswrapper[4911]: E0929 21:41:37.809625 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f88b6474-345c-4c65-9455-10feb2d34fd3" containerName="dnsmasq-dns" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809632 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f88b6474-345c-4c65-9455-10feb2d34fd3" containerName="dnsmasq-dns" Sep 29 21:41:37 crc 
kubenswrapper[4911]: E0929 21:41:37.809645 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a94e08fd-5765-4631-8e54-ec22daab0ca1" containerName="keystone-db-sync" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809651 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a94e08fd-5765-4631-8e54-ec22daab0ca1" containerName="keystone-db-sync" Sep 29 21:41:37 crc kubenswrapper[4911]: E0929 21:41:37.809666 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f88b6474-345c-4c65-9455-10feb2d34fd3" containerName="init" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809672 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f88b6474-345c-4c65-9455-10feb2d34fd3" containerName="init" Sep 29 21:41:37 crc kubenswrapper[4911]: E0929 21:41:37.809690 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15d8f947-4b2f-4605-a19a-2908a833d854" containerName="mariadb-database-create" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809698 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="15d8f947-4b2f-4605-a19a-2908a833d854" containerName="mariadb-database-create" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809920 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e8211a0-b5a5-4dd2-8b18-6616202ebe45" containerName="glance-db-sync" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809931 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="15d8f947-4b2f-4605-a19a-2908a833d854" containerName="mariadb-database-create" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809938 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bae9375-72d0-4d80-a85b-00e594f08ec5" containerName="mariadb-database-create" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809952 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a94e08fd-5765-4631-8e54-ec22daab0ca1" containerName="keystone-db-sync" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809963 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4176d75-f7e0-4327-97cc-f1d89925650f" containerName="mariadb-database-create" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.809974 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f88b6474-345c-4c65-9455-10feb2d34fd3" containerName="dnsmasq-dns" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.810863 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.828552 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-kj4rt"] Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.845730 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.846008 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.846039 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.846091 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-config\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.846170 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.846224 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mlj7\" (UniqueName: \"kubernetes.io/projected/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-kube-api-access-8mlj7\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.946611 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-config\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.946683 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.946713 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-8mlj7\" (UniqueName: \"kubernetes.io/projected/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-kube-api-access-8mlj7\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.946732 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.946817 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.946835 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.947395 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-config\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.947664 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.947752 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.947866 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.947992 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:37 crc kubenswrapper[4911]: I0929 21:41:37.963135 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mlj7\" (UniqueName: 
\"kubernetes.io/projected/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-kube-api-access-8mlj7\") pod \"dnsmasq-dns-7ff5475cc9-kj4rt\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.138568 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.262581 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-wfp9c" event={"ID":"a94e08fd-5765-4631-8e54-ec22daab0ca1","Type":"ContainerDied","Data":"65dd8d0a78695736c5b436f7e26d369db6ff04ff1496c7efc2e0b58e78a16062"} Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.262879 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65dd8d0a78695736c5b436f7e26d369db6ff04ff1496c7efc2e0b58e78a16062" Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.262675 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-wfp9c" Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.499978 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-kj4rt"] Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.536971 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-x6k2z"] Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.541854 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-x6k2z" Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.544543 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.544859 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-bg252" Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.544956 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.546269 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"] Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.548360 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.553458 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.557211 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-x6k2z"]
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.585033 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"]
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.658358 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-credential-keys\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.658409 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-scripts\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.658683 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bptc6\" (UniqueName: \"kubernetes.io/projected/f37b1e92-20d2-4105-97f3-9d42142efbf7-kube-api-access-bptc6\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.658756 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jw59s\" (UniqueName: \"kubernetes.io/projected/73453739-c89c-4f4d-b982-29761a648489-kube-api-access-jw59s\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.659339 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.659405 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.659431 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-combined-ca-bundle\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.659486 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.659508 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-fernet-keys\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.659530 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-config\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.659551 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.659582 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-config-data\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.760920 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.760987 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.761014 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-combined-ca-bundle\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.761062 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.761088 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-fernet-keys\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.761112 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-config\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.761135 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.761166 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-config-data\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.761190 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-credential-keys\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.761227 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-scripts\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.761561 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bptc6\" (UniqueName: \"kubernetes.io/projected/f37b1e92-20d2-4105-97f3-9d42142efbf7-kube-api-access-bptc6\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.762735 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jw59s\" (UniqueName: \"kubernetes.io/projected/73453739-c89c-4f4d-b982-29761a648489-kube-api-access-jw59s\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.762834 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-config\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.762419 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.762057 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.762115 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.763126 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.767060 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-credential-keys\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.767143 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-fernet-keys\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.770248 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-scripts\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.770948 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-combined-ca-bundle\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.777440 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-config-data\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.778593 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jw59s\" (UniqueName: \"kubernetes.io/projected/73453739-c89c-4f4d-b982-29761a648489-kube-api-access-jw59s\") pod \"dnsmasq-dns-5c5cc7c5ff-jdp5q\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.779998 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bptc6\" (UniqueName: \"kubernetes.io/projected/f37b1e92-20d2-4105-97f3-9d42142efbf7-kube-api-access-bptc6\") pod \"keystone-bootstrap-x6k2z\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") " pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.864195 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.876374 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.929772 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"]
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.968027 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-xpfbw"]
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.970595 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.973054 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.973168 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.973327 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-6hf8s"
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.982198 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-pwvb6"]
Sep 29 21:41:38 crc kubenswrapper[4911]: I0929 21:41:38.985384 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:38.996399 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-xpfbw"]
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.008740 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-pwvb6"]
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.069000 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.069297 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-combined-ca-bundle\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.069323 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.069343 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86prj\" (UniqueName: \"kubernetes.io/projected/7dee5647-4d60-4283-b5a0-79cc059d340f-kube-api-access-86prj\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.069381 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/373766c8-b8c6-4f57-b43b-24667ddb9564-logs\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.069426 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxq2g\" (UniqueName: \"kubernetes.io/projected/373766c8-b8c6-4f57-b43b-24667ddb9564-kube-api-access-zxq2g\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.069450 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-config\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.069502 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.069551 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-config-data\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.069576 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.069604 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-scripts\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.171243 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-combined-ca-bundle\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.171296 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.171320 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86prj\" (UniqueName: \"kubernetes.io/projected/7dee5647-4d60-4283-b5a0-79cc059d340f-kube-api-access-86prj\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.171367 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/373766c8-b8c6-4f57-b43b-24667ddb9564-logs\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.171389 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxq2g\" (UniqueName: \"kubernetes.io/projected/373766c8-b8c6-4f57-b43b-24667ddb9564-kube-api-access-zxq2g\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.171413 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-config\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.171443 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.171497 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-config-data\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.171514 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.171544 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-scripts\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.171565 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.172043 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/373766c8-b8c6-4f57-b43b-24667ddb9564-logs\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.172683 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.172716 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-config\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.172922 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.173932 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.176905 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-config-data\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.177018 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.178358 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-combined-ca-bundle\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.180245 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-scripts\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.188080 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxq2g\" (UniqueName: \"kubernetes.io/projected/373766c8-b8c6-4f57-b43b-24667ddb9564-kube-api-access-zxq2g\") pod \"placement-db-sync-xpfbw\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.197057 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86prj\" (UniqueName: \"kubernetes.io/projected/7dee5647-4d60-4283-b5a0-79cc059d340f-kube-api-access-86prj\") pod \"dnsmasq-dns-8b5c85b87-pwvb6\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.231280 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-kj4rt"]
Sep 29 21:41:39 crc kubenswrapper[4911]: W0929 21:41:39.240502 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffd374d3_297b_4938_8e3f_ee2ddb92cc8a.slice/crio-1db4d2ad8ee6e6cd14de6dc9eaa8838be33f184ae2650f59604c0c2feb5f21c4 WatchSource:0}: Error finding container 1db4d2ad8ee6e6cd14de6dc9eaa8838be33f184ae2650f59604c0c2feb5f21c4: Status 404 returned error can't find the container with id 1db4d2ad8ee6e6cd14de6dc9eaa8838be33f184ae2650f59604c0c2feb5f21c4
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.277512 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" event={"ID":"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a","Type":"ContainerStarted","Data":"1db4d2ad8ee6e6cd14de6dc9eaa8838be33f184ae2650f59604c0c2feb5f21c4"}
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.309773 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xpfbw"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.330391 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.452126 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"]
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.460065 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-x6k2z"]
Sep 29 21:41:39 crc kubenswrapper[4911]: W0929 21:41:39.470145 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf37b1e92_20d2_4105_97f3_9d42142efbf7.slice/crio-09cbc54f135ad002ff887dbeadae6f07a326d2fa93f02a542c7a0d5b39571d2f WatchSource:0}: Error finding container 09cbc54f135ad002ff887dbeadae6f07a326d2fa93f02a542c7a0d5b39571d2f: Status 404 returned error can't find the container with id 09cbc54f135ad002ff887dbeadae6f07a326d2fa93f02a542c7a0d5b39571d2f
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.668014 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.669634 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.673233 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.673416 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-lw9bf"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.674404 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.712352 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.727134 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.728534 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.730662 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.783614 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8e8ce76-e024-48e1-a8ff-9648934557fa-logs\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.783666 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-scripts\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.783686 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-config-data\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.783713 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkfhn\" (UniqueName: \"kubernetes.io/projected/c8e8ce76-e024-48e1-a8ff-9648934557fa-kube-api-access-rkfhn\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.783734 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c8e8ce76-e024-48e1-a8ff-9648934557fa-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.783770 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.783948 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.793604 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.848579 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-xpfbw"]
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.867284 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.877467 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.879434 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.885197 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886206 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-config-data\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886293 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886378 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886443 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886482 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-scripts\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886547 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8e8ce76-e024-48e1-a8ff-9648934557fa-logs\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886581 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-scripts\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886602 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-config-data\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0"
Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886632 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkfhn\" (UniqueName: \"kubernetes.io/projected/c8e8ce76-e024-48e1-a8ff-9648934557fa-kube-api-access-rkfhn\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886658 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c8e8ce76-e024-48e1-a8ff-9648934557fa-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886680 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/334714b6-2be3-4276-bebc-bda7002fdb26-logs\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886725 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/334714b6-2be3-4276-bebc-bda7002fdb26-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886732 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886746 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7lff\" (UniqueName: \"kubernetes.io/projected/334714b6-2be3-4276-bebc-bda7002fdb26-kube-api-access-x7lff\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.886773 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.887288 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.914155 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8e8ce76-e024-48e1-a8ff-9648934557fa-logs\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.924114 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/c8e8ce76-e024-48e1-a8ff-9648934557fa-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.926981 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.928740 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-config-data\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.928886 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkfhn\" (UniqueName: \"kubernetes.io/projected/c8e8ce76-e024-48e1-a8ff-9648934557fa-kube-api-access-rkfhn\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.934720 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-pwvb6"] Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.949553 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.950898 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-scripts\") pod \"glance-default-external-api-0\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.988315 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.988419 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-scripts\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.988493 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtsmx\" (UniqueName: \"kubernetes.io/projected/d12bb9a1-941b-41be-8cb9-3f274e27e497-kube-api-access-wtsmx\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.988554 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.988647 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d12bb9a1-941b-41be-8cb9-3f274e27e497-log-httpd\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.988727 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d12bb9a1-941b-41be-8cb9-3f274e27e497-run-httpd\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.989410 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-config-data\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.989543 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/334714b6-2be3-4276-bebc-bda7002fdb26-logs\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.989643 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/334714b6-2be3-4276-bebc-bda7002fdb26-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.989780 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7lff\" (UniqueName: \"kubernetes.io/projected/334714b6-2be3-4276-bebc-bda7002fdb26-kube-api-access-x7lff\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.989865 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-scripts\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.989931 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-config-data\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.990013 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.990078 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.992661 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/334714b6-2be3-4276-bebc-bda7002fdb26-logs\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.993279 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/334714b6-2be3-4276-bebc-bda7002fdb26-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.993744 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.999438 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-scripts\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:39 crc kubenswrapper[4911]: I0929 21:41:39.999615 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.001507 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-config-data\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.014993 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7lff\" (UniqueName: \"kubernetes.io/projected/334714b6-2be3-4276-bebc-bda7002fdb26-kube-api-access-x7lff\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.030129 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 
21:41:40.052565 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.075412 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.091554 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.091698 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtsmx\" (UniqueName: \"kubernetes.io/projected/d12bb9a1-941b-41be-8cb9-3f274e27e497-kube-api-access-wtsmx\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.091723 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.091752 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d12bb9a1-941b-41be-8cb9-3f274e27e497-log-httpd\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.091772 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d12bb9a1-941b-41be-8cb9-3f274e27e497-run-httpd\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.091830 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-config-data\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.091867 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-scripts\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.094072 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d12bb9a1-941b-41be-8cb9-3f274e27e497-run-httpd\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.094469 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d12bb9a1-941b-41be-8cb9-3f274e27e497-log-httpd\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.096560 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-scripts\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.097508 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.097628 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.102242 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-config-data\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.110491 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtsmx\" (UniqueName: \"kubernetes.io/projected/d12bb9a1-941b-41be-8cb9-3f274e27e497-kube-api-access-wtsmx\") pod \"ceilometer-0\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.214398 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.290671 4911 generic.go:334] "Generic (PLEG): container finished" podID="ffd374d3-297b-4938-8e3f-ee2ddb92cc8a" containerID="282b2800d818bf0e9e5e46965250f026bf6da75e1bd17e2c2f12c454912daaf5" exitCode=0 Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.290775 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" event={"ID":"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a","Type":"ContainerDied","Data":"282b2800d818bf0e9e5e46965250f026bf6da75e1bd17e2c2f12c454912daaf5"} Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.292979 4911 generic.go:334] "Generic (PLEG): container finished" podID="73453739-c89c-4f4d-b982-29761a648489" containerID="5cca2c9a58273a56a0fc313ef9e4681a4c935bc4896cad140267ccf7bb04cdff" exitCode=0 Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.293045 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q" event={"ID":"73453739-c89c-4f4d-b982-29761a648489","Type":"ContainerDied","Data":"5cca2c9a58273a56a0fc313ef9e4681a4c935bc4896cad140267ccf7bb04cdff"} Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.293082 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q" event={"ID":"73453739-c89c-4f4d-b982-29761a648489","Type":"ContainerStarted","Data":"e6579733341ed43fea66079fdfbe79e2413d276f1b80f04414d9ec37aa0f77f4"} Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.295780 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-x6k2z" event={"ID":"f37b1e92-20d2-4105-97f3-9d42142efbf7","Type":"ContainerStarted","Data":"01c89657e24e897503e29264bfec753bae6aed2ea2b1e9e35b6f646c4b04171b"} Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.295842 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-x6k2z" event={"ID":"f37b1e92-20d2-4105-97f3-9d42142efbf7","Type":"ContainerStarted","Data":"09cbc54f135ad002ff887dbeadae6f07a326d2fa93f02a542c7a0d5b39571d2f"} Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.301581 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xpfbw" event={"ID":"373766c8-b8c6-4f57-b43b-24667ddb9564","Type":"ContainerStarted","Data":"db8661e0457b5029728442896b9db088b3fe459e14c8c996911a9647436ddc32"} Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.303160 4911 generic.go:334] "Generic (PLEG): container finished" podID="7dee5647-4d60-4283-b5a0-79cc059d340f" containerID="de4e20c9f2c95db0bc8fb0100ef822644022641040189e980b231ffd7f12ce26" exitCode=0 Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.303260 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6" event={"ID":"7dee5647-4d60-4283-b5a0-79cc059d340f","Type":"ContainerDied","Data":"de4e20c9f2c95db0bc8fb0100ef822644022641040189e980b231ffd7f12ce26"} Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.303289 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6" event={"ID":"7dee5647-4d60-4283-b5a0-79cc059d340f","Type":"ContainerStarted","Data":"d20dfecb76cfdc4570f11edac7ba83ef2d6c3e8903caf431ccc01a474f05d78b"} Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.334905 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-x6k2z" podStartSLOduration=2.3340887280000002 
Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.683142 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 21:41:40 crc kubenswrapper[4911]: W0929 21:41:40.687276 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc8e8ce76_e024_48e1_a8ff_9648934557fa.slice/crio-66de86ae15984e89648976b86935b0b9e0d31015b5243577d3a7fb571602304e WatchSource:0}: Error finding container 66de86ae15984e89648976b86935b0b9e0d31015b5243577d3a7fb571602304e: Status 404 returned error can't find the container with id 66de86ae15984e89648976b86935b0b9e0d31015b5243577d3a7fb571602304e
Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.768754 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 29 21:41:40 crc kubenswrapper[4911]: W0929 21:41:40.786191 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod334714b6_2be3_4276_bebc_bda7002fdb26.slice/crio-2e651ed92423748d1a9f0c8c5ab701fad1b2cd5136bb6b218bac938513d3a301 WatchSource:0}: Error finding container 2e651ed92423748d1a9f0c8c5ab701fad1b2cd5136bb6b218bac938513d3a301: Status 404 returned error can't find the container with id 2e651ed92423748d1a9f0c8c5ab701fad1b2cd5136bb6b218bac938513d3a301
Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.842676 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"
Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.858500 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt"
Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.908763 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:41:40 crc kubenswrapper[4911]: W0929 21:41:40.909873 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd12bb9a1_941b_41be_8cb9_3f274e27e497.slice/crio-e54da3d79eb60932376baf4a890797757bc83cb6cadc19db255b8a652bbce6c4 WatchSource:0}: Error finding container e54da3d79eb60932376baf4a890797757bc83cb6cadc19db255b8a652bbce6c4: Status 404 returned error can't find the container with id e54da3d79eb60932376baf4a890797757bc83cb6cadc19db255b8a652bbce6c4
Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.918144 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-ovsdbserver-nb\") pod \"73453739-c89c-4f4d-b982-29761a648489\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") "
Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.918196 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-ovsdbserver-sb\") pod \"73453739-c89c-4f4d-b982-29761a648489\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") "
Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.918315 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jw59s\" (UniqueName: \"kubernetes.io/projected/73453739-c89c-4f4d-b982-29761a648489-kube-api-access-jw59s\") pod \"73453739-c89c-4f4d-b982-29761a648489\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") "
Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.918393 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-config\") pod \"73453739-c89c-4f4d-b982-29761a648489\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") "
Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.918434 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-dns-svc\") pod \"73453739-c89c-4f4d-b982-29761a648489\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") "
Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.918460 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-dns-swift-storage-0\") pod \"73453739-c89c-4f4d-b982-29761a648489\" (UID: \"73453739-c89c-4f4d-b982-29761a648489\") "
Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.938471 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73453739-c89c-4f4d-b982-29761a648489-kube-api-access-jw59s" (OuterVolumeSpecName: "kube-api-access-jw59s") pod "73453739-c89c-4f4d-b982-29761a648489" (UID: "73453739-c89c-4f4d-b982-29761a648489"). InnerVolumeSpecName "kube-api-access-jw59s". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.940450 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "73453739-c89c-4f4d-b982-29761a648489" (UID: "73453739-c89c-4f4d-b982-29761a648489"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.947942 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "73453739-c89c-4f4d-b982-29761a648489" (UID: "73453739-c89c-4f4d-b982-29761a648489"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.949156 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "73453739-c89c-4f4d-b982-29761a648489" (UID: "73453739-c89c-4f4d-b982-29761a648489"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.964896 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "73453739-c89c-4f4d-b982-29761a648489" (UID: "73453739-c89c-4f4d-b982-29761a648489"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:40 crc kubenswrapper[4911]: I0929 21:41:40.967639 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-config" (OuterVolumeSpecName: "config") pod "73453739-c89c-4f4d-b982-29761a648489" (UID: "73453739-c89c-4f4d-b982-29761a648489"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.020639 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-dns-swift-storage-0\") pod \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.020721 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mlj7\" (UniqueName: \"kubernetes.io/projected/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-kube-api-access-8mlj7\") pod \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.020877 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-config\") pod \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.021001 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-ovsdbserver-nb\") pod \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.021081 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-dns-svc\") pod \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.021142 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-ovsdbserver-sb\") pod \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\" (UID: \"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a\") " Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.021451 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jw59s\" (UniqueName: \"kubernetes.io/projected/73453739-c89c-4f4d-b982-29761a648489-kube-api-access-jw59s\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.021467 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.021477 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.021486 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.021494 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73453739-c89c-4f4d-b982-29761a648489-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.030718 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-kube-api-access-8mlj7" (OuterVolumeSpecName: "kube-api-access-8mlj7") pod "ffd374d3-297b-4938-8e3f-ee2ddb92cc8a" (UID: "ffd374d3-297b-4938-8e3f-ee2ddb92cc8a"). InnerVolumeSpecName "kube-api-access-8mlj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.045873 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ffd374d3-297b-4938-8e3f-ee2ddb92cc8a" (UID: "ffd374d3-297b-4938-8e3f-ee2ddb92cc8a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.057677 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ffd374d3-297b-4938-8e3f-ee2ddb92cc8a" (UID: "ffd374d3-297b-4938-8e3f-ee2ddb92cc8a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.063063 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ffd374d3-297b-4938-8e3f-ee2ddb92cc8a" (UID: "ffd374d3-297b-4938-8e3f-ee2ddb92cc8a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.068599 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-config" (OuterVolumeSpecName: "config") pod "ffd374d3-297b-4938-8e3f-ee2ddb92cc8a" (UID: "ffd374d3-297b-4938-8e3f-ee2ddb92cc8a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.073218 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ffd374d3-297b-4938-8e3f-ee2ddb92cc8a" (UID: "ffd374d3-297b-4938-8e3f-ee2ddb92cc8a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.122850 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.122883 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.122893 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.122901 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.122911 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mlj7\" (UniqueName: \"kubernetes.io/projected/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-kube-api-access-8mlj7\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.122919 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.314839 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"334714b6-2be3-4276-bebc-bda7002fdb26","Type":"ContainerStarted","Data":"2e651ed92423748d1a9f0c8c5ab701fad1b2cd5136bb6b218bac938513d3a301"} Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.317193 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" event={"ID":"ffd374d3-297b-4938-8e3f-ee2ddb92cc8a","Type":"ContainerDied","Data":"1db4d2ad8ee6e6cd14de6dc9eaa8838be33f184ae2650f59604c0c2feb5f21c4"} Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.317245 4911 scope.go:117] "RemoveContainer" containerID="282b2800d818bf0e9e5e46965250f026bf6da75e1bd17e2c2f12c454912daaf5" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.317300 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-kj4rt" Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.338356 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q" event={"ID":"73453739-c89c-4f4d-b982-29761a648489","Type":"ContainerDied","Data":"e6579733341ed43fea66079fdfbe79e2413d276f1b80f04414d9ec37aa0f77f4"} Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.338444 4911 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.341171 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d12bb9a1-941b-41be-8cb9-3f274e27e497","Type":"ContainerStarted","Data":"e54da3d79eb60932376baf4a890797757bc83cb6cadc19db255b8a652bbce6c4"}
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.342543 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c8e8ce76-e024-48e1-a8ff-9648934557fa","Type":"ContainerStarted","Data":"66de86ae15984e89648976b86935b0b9e0d31015b5243577d3a7fb571602304e"}
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.359002 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6" event={"ID":"7dee5647-4d60-4283-b5a0-79cc059d340f","Type":"ContainerStarted","Data":"b46ef6c12a3a0cdc0f13eebd9f70a13e6d26a43aa068cbbed851462d693e8578"}
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.361344 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6"
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.409633 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-kj4rt"]
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.418304 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6" podStartSLOduration=3.418283525 podStartE2EDuration="3.418283525s" podCreationTimestamp="2025-09-29 21:41:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:41:41.402981301 +0000 UTC m=+979.380093982" watchObservedRunningTime="2025-09-29 21:41:41.418283525 +0000 UTC m=+979.395396196"
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.419999 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-kj4rt"]
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.452177 4911 scope.go:117] "RemoveContainer" containerID="5cca2c9a58273a56a0fc313ef9e4681a4c935bc4896cad140267ccf7bb04cdff"
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.504841 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"]
Sep 29 21:41:41 crc kubenswrapper[4911]: I0929 21:41:41.533033 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-jdp5q"]
Sep 29 21:41:42 crc kubenswrapper[4911]: I0929 21:41:42.374917 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"334714b6-2be3-4276-bebc-bda7002fdb26","Type":"ContainerStarted","Data":"ee5127f04a6fd3e9f3b0b8287a9a0795d47ada2a28da441a69eff49828d4d8b8"}
Sep 29 21:41:42 crc kubenswrapper[4911]: I0929 21:41:42.375257 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"334714b6-2be3-4276-bebc-bda7002fdb26","Type":"ContainerStarted","Data":"889dc7dfb0c3ee8b10a85ac401a6d676ef090cf983eb14d9b39bdb136b17f066"}
Sep 29 21:41:42 crc kubenswrapper[4911]: I0929 21:41:42.381129 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c8e8ce76-e024-48e1-a8ff-9648934557fa","Type":"ContainerStarted","Data":"c738deb9c3a2d2e4c5e3ab0120e5dfd37a1e14edb94c00389ebf472295b31d66"}
Sep 29 21:41:42 crc kubenswrapper[4911]: I0929 21:41:42.381175 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c8e8ce76-e024-48e1-a8ff-9648934557fa","Type":"ContainerStarted","Data":"f84f85bad22b5931fa7ebec02b12867d5371c2428352c5c46866149260e7e9a3"}
Sep 29 21:41:42 crc kubenswrapper[4911]: I0929 21:41:42.465140 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 21:41:42 crc kubenswrapper[4911]: I0929 21:41:42.503314 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:41:42 crc kubenswrapper[4911]: I0929 21:41:42.532414 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 29 21:41:42 crc kubenswrapper[4911]: I0929 21:41:42.715339 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73453739-c89c-4f4d-b982-29761a648489" path="/var/lib/kubelet/pods/73453739-c89c-4f4d-b982-29761a648489/volumes"
Sep 29 21:41:42 crc kubenswrapper[4911]: I0929 21:41:42.716220 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffd374d3-297b-4938-8e3f-ee2ddb92cc8a" path="/var/lib/kubelet/pods/ffd374d3-297b-4938-8e3f-ee2ddb92cc8a/volumes"
Sep 29 21:41:43 crc kubenswrapper[4911]: I0929 21:41:43.428689 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.42867272 podStartE2EDuration="5.42867272s" podCreationTimestamp="2025-09-29 21:41:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:41:43.417782852 +0000 UTC m=+981.394895543" watchObservedRunningTime="2025-09-29 21:41:43.42867272 +0000 UTC m=+981.405785391"
Sep 29 21:41:43 crc kubenswrapper[4911]: I0929 21:41:43.449179 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.449160865 podStartE2EDuration="5.449160865s" podCreationTimestamp="2025-09-29 21:41:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:41:43.442613143 +0000 UTC m=+981.419725824" watchObservedRunningTime="2025-09-29 21:41:43.449160865 +0000 UTC m=+981.426273536"
Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.402372 4911 generic.go:334] "Generic (PLEG): container finished" podID="f37b1e92-20d2-4105-97f3-9d42142efbf7" containerID="01c89657e24e897503e29264bfec753bae6aed2ea2b1e9e35b6f646c4b04171b" exitCode=0
Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.402890 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="334714b6-2be3-4276-bebc-bda7002fdb26" containerName="glance-log" containerID="cri-o://889dc7dfb0c3ee8b10a85ac401a6d676ef090cf983eb14d9b39bdb136b17f066" gracePeriod=30
Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.402559 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-x6k2z" event={"ID":"f37b1e92-20d2-4105-97f3-9d42142efbf7","Type":"ContainerDied","Data":"01c89657e24e897503e29264bfec753bae6aed2ea2b1e9e35b6f646c4b04171b"}
Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.403174 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c8e8ce76-e024-48e1-a8ff-9648934557fa" containerName="glance-log" containerID="cri-o://f84f85bad22b5931fa7ebec02b12867d5371c2428352c5c46866149260e7e9a3" gracePeriod=30
podUID="c8e8ce76-e024-48e1-a8ff-9648934557fa" containerName="glance-log" containerID="cri-o://f84f85bad22b5931fa7ebec02b12867d5371c2428352c5c46866149260e7e9a3" gracePeriod=30 Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.403616 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="334714b6-2be3-4276-bebc-bda7002fdb26" containerName="glance-httpd" containerID="cri-o://ee5127f04a6fd3e9f3b0b8287a9a0795d47ada2a28da441a69eff49828d4d8b8" gracePeriod=30 Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.403747 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c8e8ce76-e024-48e1-a8ff-9648934557fa" containerName="glance-httpd" containerID="cri-o://c738deb9c3a2d2e4c5e3ab0120e5dfd37a1e14edb94c00389ebf472295b31d66" gracePeriod=30 Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.731474 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-17b1-account-create-qkj5h"] Sep 29 21:41:44 crc kubenswrapper[4911]: E0929 21:41:44.731771 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73453739-c89c-4f4d-b982-29761a648489" containerName="init" Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.731782 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="73453739-c89c-4f4d-b982-29761a648489" containerName="init" Sep 29 21:41:44 crc kubenswrapper[4911]: E0929 21:41:44.731818 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffd374d3-297b-4938-8e3f-ee2ddb92cc8a" containerName="init" Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.731824 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffd374d3-297b-4938-8e3f-ee2ddb92cc8a" containerName="init" Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.731987 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="73453739-c89c-4f4d-b982-29761a648489" containerName="init" Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.732013 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffd374d3-297b-4938-8e3f-ee2ddb92cc8a" containerName="init" Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.732472 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-17b1-account-create-qkj5h"] Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.732541 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-17b1-account-create-qkj5h" Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.739560 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.806707 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-9232-account-create-skzbz"] Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.807935 4911 util.go:30] "No sandbox for pod can be found. 
Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.810017 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.813688 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-9232-account-create-skzbz"]
Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.821276 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86v79\" (UniqueName: \"kubernetes.io/projected/4e462c34-9385-46b1-9707-87944ea13535-kube-api-access-86v79\") pod \"cinder-17b1-account-create-qkj5h\" (UID: \"4e462c34-9385-46b1-9707-87944ea13535\") " pod="openstack/cinder-17b1-account-create-qkj5h"
Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.923125 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84w8g\" (UniqueName: \"kubernetes.io/projected/33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2-kube-api-access-84w8g\") pod \"barbican-9232-account-create-skzbz\" (UID: \"33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2\") " pod="openstack/barbican-9232-account-create-skzbz"
Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.923460 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86v79\" (UniqueName: \"kubernetes.io/projected/4e462c34-9385-46b1-9707-87944ea13535-kube-api-access-86v79\") pod \"cinder-17b1-account-create-qkj5h\" (UID: \"4e462c34-9385-46b1-9707-87944ea13535\") " pod="openstack/cinder-17b1-account-create-qkj5h"
Sep 29 21:41:44 crc kubenswrapper[4911]: I0929 21:41:44.946853 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86v79\" (UniqueName: \"kubernetes.io/projected/4e462c34-9385-46b1-9707-87944ea13535-kube-api-access-86v79\") pod \"cinder-17b1-account-create-qkj5h\" (UID: \"4e462c34-9385-46b1-9707-87944ea13535\") " pod="openstack/cinder-17b1-account-create-qkj5h"
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.024971 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84w8g\" (UniqueName: \"kubernetes.io/projected/33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2-kube-api-access-84w8g\") pod \"barbican-9232-account-create-skzbz\" (UID: \"33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2\") " pod="openstack/barbican-9232-account-create-skzbz"
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.041436 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84w8g\" (UniqueName: \"kubernetes.io/projected/33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2-kube-api-access-84w8g\") pod \"barbican-9232-account-create-skzbz\" (UID: \"33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2\") " pod="openstack/barbican-9232-account-create-skzbz"
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.061158 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-17b1-account-create-qkj5h"
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.111318 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-f317-account-create-mfjgf"]
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.113894 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f317-account-create-mfjgf"
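The kube-api-access-* volumes being verified and mounted here are the projected service-account volumes that every pod gets: a bound token, the cluster CA bundle, and the namespace via the downward API. A sketch of the equivalent volume built from client-go types; the three sources and their paths are the standard defaults, not read from this log:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	expiry := int64(3607)
	vol := corev1.Volume{
		Name: "kube-api-access-86v79",
		VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{
				Sources: []corev1.VolumeProjection{
					// Bound service-account token, rotated by the kubelet.
					{ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
						Path: "token", ExpirationSeconds: &expiry}},
					// Cluster CA bundle from the kube-root-ca.crt ConfigMap.
					{ConfigMap: &corev1.ConfigMapProjection{
						LocalObjectReference: corev1.LocalObjectReference{Name: "kube-root-ca.crt"},
						Items:                []corev1.KeyToPath{{Key: "ca.crt", Path: "ca.crt"}}}},
					// Pod namespace via the downward API.
					{DownwardAPI: &corev1.DownwardAPIProjection{
						Items: []corev1.DownwardAPIVolumeFile{{
							Path:     "namespace",
							FieldRef: &corev1.ObjectFieldSelector{FieldPath: "metadata.namespace"}}}}},
				},
			},
		},
	}
	fmt.Println(vol.Name)
}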
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.118148 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.124011 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f317-account-create-mfjgf"]
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.140586 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-9232-account-create-skzbz"
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.228879 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z92n\" (UniqueName: \"kubernetes.io/projected/bf8be712-8bd6-403d-b709-015e4be795d8-kube-api-access-2z92n\") pod \"neutron-f317-account-create-mfjgf\" (UID: \"bf8be712-8bd6-403d-b709-015e4be795d8\") " pod="openstack/neutron-f317-account-create-mfjgf"
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.330156 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z92n\" (UniqueName: \"kubernetes.io/projected/bf8be712-8bd6-403d-b709-015e4be795d8-kube-api-access-2z92n\") pod \"neutron-f317-account-create-mfjgf\" (UID: \"bf8be712-8bd6-403d-b709-015e4be795d8\") " pod="openstack/neutron-f317-account-create-mfjgf"
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.357922 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z92n\" (UniqueName: \"kubernetes.io/projected/bf8be712-8bd6-403d-b709-015e4be795d8-kube-api-access-2z92n\") pod \"neutron-f317-account-create-mfjgf\" (UID: \"bf8be712-8bd6-403d-b709-015e4be795d8\") " pod="openstack/neutron-f317-account-create-mfjgf"
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.416778 4911 generic.go:334] "Generic (PLEG): container finished" podID="334714b6-2be3-4276-bebc-bda7002fdb26" containerID="ee5127f04a6fd3e9f3b0b8287a9a0795d47ada2a28da441a69eff49828d4d8b8" exitCode=0
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.416835 4911 generic.go:334] "Generic (PLEG): container finished" podID="334714b6-2be3-4276-bebc-bda7002fdb26" containerID="889dc7dfb0c3ee8b10a85ac401a6d676ef090cf983eb14d9b39bdb136b17f066" exitCode=143
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.416827 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"334714b6-2be3-4276-bebc-bda7002fdb26","Type":"ContainerDied","Data":"ee5127f04a6fd3e9f3b0b8287a9a0795d47ada2a28da441a69eff49828d4d8b8"}
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.416900 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"334714b6-2be3-4276-bebc-bda7002fdb26","Type":"ContainerDied","Data":"889dc7dfb0c3ee8b10a85ac401a6d676ef090cf983eb14d9b39bdb136b17f066"}
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.419483 4911 generic.go:334] "Generic (PLEG): container finished" podID="c8e8ce76-e024-48e1-a8ff-9648934557fa" containerID="c738deb9c3a2d2e4c5e3ab0120e5dfd37a1e14edb94c00389ebf472295b31d66" exitCode=0
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.419507 4911 generic.go:334] "Generic (PLEG): container finished" podID="c8e8ce76-e024-48e1-a8ff-9648934557fa" containerID="f84f85bad22b5931fa7ebec02b12867d5371c2428352c5c46866149260e7e9a3" exitCode=143
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.419571 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c8e8ce76-e024-48e1-a8ff-9648934557fa","Type":"ContainerDied","Data":"c738deb9c3a2d2e4c5e3ab0120e5dfd37a1e14edb94c00389ebf472295b31d66"}
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.419624 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c8e8ce76-e024-48e1-a8ff-9648934557fa","Type":"ContainerDied","Data":"f84f85bad22b5931fa7ebec02b12867d5371c2428352c5c46866149260e7e9a3"}
Sep 29 21:41:45 crc kubenswrapper[4911]: I0929 21:41:45.445265 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f317-account-create-mfjgf"
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.059517 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-x6k2z"
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.169331 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bptc6\" (UniqueName: \"kubernetes.io/projected/f37b1e92-20d2-4105-97f3-9d42142efbf7-kube-api-access-bptc6\") pod \"f37b1e92-20d2-4105-97f3-9d42142efbf7\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") "
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.169742 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-scripts\") pod \"f37b1e92-20d2-4105-97f3-9d42142efbf7\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") "
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.169808 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-combined-ca-bundle\") pod \"f37b1e92-20d2-4105-97f3-9d42142efbf7\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") "
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.169837 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-fernet-keys\") pod \"f37b1e92-20d2-4105-97f3-9d42142efbf7\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") "
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.169929 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-credential-keys\") pod \"f37b1e92-20d2-4105-97f3-9d42142efbf7\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") "
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.169964 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-config-data\") pod \"f37b1e92-20d2-4105-97f3-9d42142efbf7\" (UID: \"f37b1e92-20d2-4105-97f3-9d42142efbf7\") "
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.177052 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "f37b1e92-20d2-4105-97f3-9d42142efbf7" (UID: "f37b1e92-20d2-4105-97f3-9d42142efbf7"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.195146 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-scripts" (OuterVolumeSpecName: "scripts") pod "f37b1e92-20d2-4105-97f3-9d42142efbf7" (UID: "f37b1e92-20d2-4105-97f3-9d42142efbf7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.195897 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f37b1e92-20d2-4105-97f3-9d42142efbf7-kube-api-access-bptc6" (OuterVolumeSpecName: "kube-api-access-bptc6") pod "f37b1e92-20d2-4105-97f3-9d42142efbf7" (UID: "f37b1e92-20d2-4105-97f3-9d42142efbf7"). InnerVolumeSpecName "kube-api-access-bptc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.195902 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "f37b1e92-20d2-4105-97f3-9d42142efbf7" (UID: "f37b1e92-20d2-4105-97f3-9d42142efbf7"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.211546 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-config-data" (OuterVolumeSpecName: "config-data") pod "f37b1e92-20d2-4105-97f3-9d42142efbf7" (UID: "f37b1e92-20d2-4105-97f3-9d42142efbf7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.218691 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f37b1e92-20d2-4105-97f3-9d42142efbf7" (UID: "f37b1e92-20d2-4105-97f3-9d42142efbf7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.272925 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.272975 4911 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.272993 4911 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.273005 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.273017 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bptc6\" (UniqueName: \"kubernetes.io/projected/f37b1e92-20d2-4105-97f3-9d42142efbf7-kube-api-access-bptc6\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.273029 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f37b1e92-20d2-4105-97f3-9d42142efbf7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.453284 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xpfbw" event={"ID":"373766c8-b8c6-4f57-b43b-24667ddb9564","Type":"ContainerStarted","Data":"134d7d482dcdb7c997c96a6bd81e94c04338e5dd41ccc582689b9d3a127edb17"} Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.455235 4911 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.455303 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-x6k2z" event={"ID":"f37b1e92-20d2-4105-97f3-9d42142efbf7","Type":"ContainerDied","Data":"09cbc54f135ad002ff887dbeadae6f07a326d2fa93f02a542c7a0d5b39571d2f"}
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.455339 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09cbc54f135ad002ff887dbeadae6f07a326d2fa93f02a542c7a0d5b39571d2f"
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.457327 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d12bb9a1-941b-41be-8cb9-3f274e27e497","Type":"ContainerStarted","Data":"939ba4c9fca017dbacc3b9007aca336c5c5ecae178afe623cae8639a7efbffe5"}
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.494542 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-xpfbw" podStartSLOduration=2.341661383 podStartE2EDuration="9.494526707s" podCreationTimestamp="2025-09-29 21:41:38 +0000 UTC" firstStartedPulling="2025-09-29 21:41:39.850951053 +0000 UTC m=+977.828063724" lastFinishedPulling="2025-09-29 21:41:47.003816377 +0000 UTC m=+984.980929048" observedRunningTime="2025-09-29 21:41:47.491178542 +0000 UTC m=+985.468291213" watchObservedRunningTime="2025-09-29 21:41:47.494526707 +0000 UTC m=+985.471639378"
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.523669 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.565273 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.642548 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-17b1-account-create-qkj5h"]
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.656603 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f317-account-create-mfjgf"]
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.681695 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c8e8ce76-e024-48e1-a8ff-9648934557fa-httpd-run\") pod \"c8e8ce76-e024-48e1-a8ff-9648934557fa\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") "
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.681760 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-scripts\") pod \"334714b6-2be3-4276-bebc-bda7002fdb26\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") "
Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683436 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8e8ce76-e024-48e1-a8ff-9648934557fa-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c8e8ce76-e024-48e1-a8ff-9648934557fa" (UID: "c8e8ce76-e024-48e1-a8ff-9648934557fa"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683509 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/334714b6-2be3-4276-bebc-bda7002fdb26-logs\") pod \"334714b6-2be3-4276-bebc-bda7002fdb26\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683550 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"c8e8ce76-e024-48e1-a8ff-9648934557fa\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683574 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-combined-ca-bundle\") pod \"c8e8ce76-e024-48e1-a8ff-9648934557fa\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683598 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7lff\" (UniqueName: \"kubernetes.io/projected/334714b6-2be3-4276-bebc-bda7002fdb26-kube-api-access-x7lff\") pod \"334714b6-2be3-4276-bebc-bda7002fdb26\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683623 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-config-data\") pod \"334714b6-2be3-4276-bebc-bda7002fdb26\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683667 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-scripts\") pod \"c8e8ce76-e024-48e1-a8ff-9648934557fa\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683685 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8e8ce76-e024-48e1-a8ff-9648934557fa-logs\") pod \"c8e8ce76-e024-48e1-a8ff-9648934557fa\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683710 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-config-data\") pod \"c8e8ce76-e024-48e1-a8ff-9648934557fa\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683740 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/334714b6-2be3-4276-bebc-bda7002fdb26-httpd-run\") pod \"334714b6-2be3-4276-bebc-bda7002fdb26\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683776 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"334714b6-2be3-4276-bebc-bda7002fdb26\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683812 4911 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-combined-ca-bundle\") pod \"334714b6-2be3-4276-bebc-bda7002fdb26\" (UID: \"334714b6-2be3-4276-bebc-bda7002fdb26\") " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.683851 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkfhn\" (UniqueName: \"kubernetes.io/projected/c8e8ce76-e024-48e1-a8ff-9648934557fa-kube-api-access-rkfhn\") pod \"c8e8ce76-e024-48e1-a8ff-9648934557fa\" (UID: \"c8e8ce76-e024-48e1-a8ff-9648934557fa\") " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.684155 4911 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c8e8ce76-e024-48e1-a8ff-9648934557fa-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.685787 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8e8ce76-e024-48e1-a8ff-9648934557fa-logs" (OuterVolumeSpecName: "logs") pod "c8e8ce76-e024-48e1-a8ff-9648934557fa" (UID: "c8e8ce76-e024-48e1-a8ff-9648934557fa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.688436 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/334714b6-2be3-4276-bebc-bda7002fdb26-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "334714b6-2be3-4276-bebc-bda7002fdb26" (UID: "334714b6-2be3-4276-bebc-bda7002fdb26"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.688886 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8e8ce76-e024-48e1-a8ff-9648934557fa-kube-api-access-rkfhn" (OuterVolumeSpecName: "kube-api-access-rkfhn") pod "c8e8ce76-e024-48e1-a8ff-9648934557fa" (UID: "c8e8ce76-e024-48e1-a8ff-9648934557fa"). InnerVolumeSpecName "kube-api-access-rkfhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.689294 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-scripts" (OuterVolumeSpecName: "scripts") pod "c8e8ce76-e024-48e1-a8ff-9648934557fa" (UID: "c8e8ce76-e024-48e1-a8ff-9648934557fa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.689573 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/334714b6-2be3-4276-bebc-bda7002fdb26-logs" (OuterVolumeSpecName: "logs") pod "334714b6-2be3-4276-bebc-bda7002fdb26" (UID: "334714b6-2be3-4276-bebc-bda7002fdb26"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.690111 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "334714b6-2be3-4276-bebc-bda7002fdb26" (UID: "334714b6-2be3-4276-bebc-bda7002fdb26"). InnerVolumeSpecName "local-storage08-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.691170 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-scripts" (OuterVolumeSpecName: "scripts") pod "334714b6-2be3-4276-bebc-bda7002fdb26" (UID: "334714b6-2be3-4276-bebc-bda7002fdb26"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.698916 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/334714b6-2be3-4276-bebc-bda7002fdb26-kube-api-access-x7lff" (OuterVolumeSpecName: "kube-api-access-x7lff") pod "334714b6-2be3-4276-bebc-bda7002fdb26" (UID: "334714b6-2be3-4276-bebc-bda7002fdb26"). InnerVolumeSpecName "kube-api-access-x7lff". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.699010 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "c8e8ce76-e024-48e1-a8ff-9648934557fa" (UID: "c8e8ce76-e024-48e1-a8ff-9648934557fa"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.717605 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8e8ce76-e024-48e1-a8ff-9648934557fa" (UID: "c8e8ce76-e024-48e1-a8ff-9648934557fa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.718920 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "334714b6-2be3-4276-bebc-bda7002fdb26" (UID: "334714b6-2be3-4276-bebc-bda7002fdb26"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.739140 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-config-data" (OuterVolumeSpecName: "config-data") pod "c8e8ce76-e024-48e1-a8ff-9648934557fa" (UID: "c8e8ce76-e024-48e1-a8ff-9648934557fa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.761758 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-config-data" (OuterVolumeSpecName: "config-data") pod "334714b6-2be3-4276-bebc-bda7002fdb26" (UID: "334714b6-2be3-4276-bebc-bda7002fdb26"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785833 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkfhn\" (UniqueName: \"kubernetes.io/projected/c8e8ce76-e024-48e1-a8ff-9648934557fa-kube-api-access-rkfhn\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785863 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785873 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/334714b6-2be3-4276-bebc-bda7002fdb26-logs\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785898 4911 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785907 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785917 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7lff\" (UniqueName: \"kubernetes.io/projected/334714b6-2be3-4276-bebc-bda7002fdb26-kube-api-access-x7lff\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785926 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785934 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785943 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8e8ce76-e024-48e1-a8ff-9648934557fa-logs\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785951 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8e8ce76-e024-48e1-a8ff-9648934557fa-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785961 4911 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/334714b6-2be3-4276-bebc-bda7002fdb26-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785976 4911 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785985 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/334714b6-2be3-4276-bebc-bda7002fdb26-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.785952 4911 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/barbican-9232-account-create-skzbz"] Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.809612 4911 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.824985 4911 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.888184 4911 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:47 crc kubenswrapper[4911]: I0929 21:41:47.888226 4911 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.148499 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-x6k2z"] Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.161847 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-x6k2z"] Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.244598 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-vkxxs"] Sep 29 21:41:48 crc kubenswrapper[4911]: E0929 21:41:48.244987 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="334714b6-2be3-4276-bebc-bda7002fdb26" containerName="glance-httpd" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.245006 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="334714b6-2be3-4276-bebc-bda7002fdb26" containerName="glance-httpd" Sep 29 21:41:48 crc kubenswrapper[4911]: E0929 21:41:48.245021 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="334714b6-2be3-4276-bebc-bda7002fdb26" containerName="glance-log" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.245028 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="334714b6-2be3-4276-bebc-bda7002fdb26" containerName="glance-log" Sep 29 21:41:48 crc kubenswrapper[4911]: E0929 21:41:48.245050 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8e8ce76-e024-48e1-a8ff-9648934557fa" containerName="glance-httpd" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.245058 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8e8ce76-e024-48e1-a8ff-9648934557fa" containerName="glance-httpd" Sep 29 21:41:48 crc kubenswrapper[4911]: E0929 21:41:48.245072 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f37b1e92-20d2-4105-97f3-9d42142efbf7" containerName="keystone-bootstrap" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.245078 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f37b1e92-20d2-4105-97f3-9d42142efbf7" containerName="keystone-bootstrap" Sep 29 21:41:48 crc kubenswrapper[4911]: E0929 21:41:48.245089 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8e8ce76-e024-48e1-a8ff-9648934557fa" containerName="glance-log" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.245096 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8e8ce76-e024-48e1-a8ff-9648934557fa" containerName="glance-log" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.245264 4911 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="334714b6-2be3-4276-bebc-bda7002fdb26" containerName="glance-log" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.245280 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8e8ce76-e024-48e1-a8ff-9648934557fa" containerName="glance-log" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.245288 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8e8ce76-e024-48e1-a8ff-9648934557fa" containerName="glance-httpd" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.245297 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f37b1e92-20d2-4105-97f3-9d42142efbf7" containerName="keystone-bootstrap" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.245308 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="334714b6-2be3-4276-bebc-bda7002fdb26" containerName="glance-httpd" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.245937 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.248159 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-bg252" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.248391 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.249346 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.249476 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.254527 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vkxxs"] Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.394128 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-fernet-keys\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.394170 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-config-data\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.394222 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-combined-ca-bundle\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.394359 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-credential-keys\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 
21:41:48.394590 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-scripts\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.394635 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtxk9\" (UniqueName: \"kubernetes.io/projected/8099e715-8ed5-40c3-9e20-1a2e873b867a-kube-api-access-jtxk9\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.484208 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c8e8ce76-e024-48e1-a8ff-9648934557fa","Type":"ContainerDied","Data":"66de86ae15984e89648976b86935b0b9e0d31015b5243577d3a7fb571602304e"} Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.484258 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.484266 4911 scope.go:117] "RemoveContainer" containerID="c738deb9c3a2d2e4c5e3ab0120e5dfd37a1e14edb94c00389ebf472295b31d66" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.494043 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e462c34-9385-46b1-9707-87944ea13535" containerID="1112a4f45be50979d9b61d096a4a4426e4f656e845ea656ff3e02bff8e2818c3" exitCode=0 Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.494093 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-17b1-account-create-qkj5h" event={"ID":"4e462c34-9385-46b1-9707-87944ea13535","Type":"ContainerDied","Data":"1112a4f45be50979d9b61d096a4a4426e4f656e845ea656ff3e02bff8e2818c3"} Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.494153 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-17b1-account-create-qkj5h" event={"ID":"4e462c34-9385-46b1-9707-87944ea13535","Type":"ContainerStarted","Data":"e6b56eff651ca235c4e49b37bbf3da0b6481574c0e7d60b97951473e5d4913b3"} Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.496361 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-scripts\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.496429 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtxk9\" (UniqueName: \"kubernetes.io/projected/8099e715-8ed5-40c3-9e20-1a2e873b867a-kube-api-access-jtxk9\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.498427 4911 generic.go:334] "Generic (PLEG): container finished" podID="33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2" containerID="f0eea6d3731afd22996d55a08ad6021c3370b28d76bf48922883a11065b6e603" exitCode=0 Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.498527 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-9232-account-create-skzbz" 
event={"ID":"33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2","Type":"ContainerDied","Data":"f0eea6d3731afd22996d55a08ad6021c3370b28d76bf48922883a11065b6e603"} Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.498562 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-9232-account-create-skzbz" event={"ID":"33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2","Type":"ContainerStarted","Data":"91cf735ba7e13739891d239109542393cd76e2abb70e0262655b5ec4871e27b1"} Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.499452 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-fernet-keys\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.500350 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-config-data\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.500449 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-combined-ca-bundle\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.500513 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-credential-keys\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.503541 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-scripts\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.503599 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-config-data\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.503680 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-fernet-keys\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.503747 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.503752 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"334714b6-2be3-4276-bebc-bda7002fdb26","Type":"ContainerDied","Data":"2e651ed92423748d1a9f0c8c5ab701fad1b2cd5136bb6b218bac938513d3a301"} Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.509783 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-credential-keys\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.510768 4911 generic.go:334] "Generic (PLEG): container finished" podID="bf8be712-8bd6-403d-b709-015e4be795d8" containerID="27a1ee14f12ddf347840cab9d0102756695589be15e84b6cb72e2e72aa8b748e" exitCode=0 Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.510913 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f317-account-create-mfjgf" event={"ID":"bf8be712-8bd6-403d-b709-015e4be795d8","Type":"ContainerDied","Data":"27a1ee14f12ddf347840cab9d0102756695589be15e84b6cb72e2e72aa8b748e"} Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.510945 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f317-account-create-mfjgf" event={"ID":"bf8be712-8bd6-403d-b709-015e4be795d8","Type":"ContainerStarted","Data":"45c2191a9f1146b88c82d473029759133be71335e1982179b07a4343eb5ef1a5"} Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.523947 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtxk9\" (UniqueName: \"kubernetes.io/projected/8099e715-8ed5-40c3-9e20-1a2e873b867a-kube-api-access-jtxk9\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.524487 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-combined-ca-bundle\") pod \"keystone-bootstrap-vkxxs\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.575598 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.618928 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.636783 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.645080 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.650272 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.651457 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.654774 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.655582 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.655760 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-lw9bf" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.655930 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.657659 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.663424 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.677838 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.680756 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.686064 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.686150 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.703003 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.720713 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="334714b6-2be3-4276-bebc-bda7002fdb26" path="/var/lib/kubelet/pods/334714b6-2be3-4276-bebc-bda7002fdb26/volumes" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.722137 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8e8ce76-e024-48e1-a8ff-9648934557fa" path="/var/lib/kubelet/pods/c8e8ce76-e024-48e1-a8ff-9648934557fa/volumes" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.722858 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f37b1e92-20d2-4105-97f3-9d42142efbf7" path="/var/lib/kubelet/pods/f37b1e92-20d2-4105-97f3-9d42142efbf7/volumes" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.816606 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.816684 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba309870-1c87-422f-93c3-81e704ee754e-logs\") pod \"glance-default-external-api-0\" 
(UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.816709 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/77d2a08f-5a1a-4847-81a8-a160afadf6aa-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.816724 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-config-data\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.816742 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77d2a08f-5a1a-4847-81a8-a160afadf6aa-logs\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.816764 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-config-data\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.816896 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.816980 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.817061 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjcds\" (UniqueName: \"kubernetes.io/projected/77d2a08f-5a1a-4847-81a8-a160afadf6aa-kube-api-access-gjcds\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.817131 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgwbt\" (UniqueName: \"kubernetes.io/projected/ba309870-1c87-422f-93c3-81e704ee754e-kube-api-access-pgwbt\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.817149 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.817170 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-scripts\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.817202 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-scripts\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.817233 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.817284 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ba309870-1c87-422f-93c3-81e704ee754e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.817491 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919266 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgwbt\" (UniqueName: \"kubernetes.io/projected/ba309870-1c87-422f-93c3-81e704ee754e-kube-api-access-pgwbt\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919303 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919323 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-scripts\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919346 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-scripts\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919365 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919400 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ba309870-1c87-422f-93c3-81e704ee754e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919418 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919453 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919480 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba309870-1c87-422f-93c3-81e704ee754e-logs\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919499 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-config-data\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919514 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/77d2a08f-5a1a-4847-81a8-a160afadf6aa-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919531 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77d2a08f-5a1a-4847-81a8-a160afadf6aa-logs\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919549 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-config-data\") pod \"glance-default-internal-api-0\" (UID: 
\"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919572 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919612 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.919647 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjcds\" (UniqueName: \"kubernetes.io/projected/77d2a08f-5a1a-4847-81a8-a160afadf6aa-kube-api-access-gjcds\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.920199 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.920336 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba309870-1c87-422f-93c3-81e704ee754e-logs\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.921321 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.921625 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/77d2a08f-5a1a-4847-81a8-a160afadf6aa-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.921704 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77d2a08f-5a1a-4847-81a8-a160afadf6aa-logs\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.921904 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ba309870-1c87-422f-93c3-81e704ee754e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " 
pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.939737 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-config-data\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.950778 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-scripts\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.951101 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjcds\" (UniqueName: \"kubernetes.io/projected/77d2a08f-5a1a-4847-81a8-a160afadf6aa-kube-api-access-gjcds\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.955334 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.955431 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-config-data\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.971603 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.979499 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-scripts\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.980433 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:48 crc kubenswrapper[4911]: I0929 21:41:48.999486 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgwbt\" (UniqueName: \"kubernetes.io/projected/ba309870-1c87-422f-93c3-81e704ee754e-kube-api-access-pgwbt\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:49 crc kubenswrapper[4911]: I0929 21:41:49.034296 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:49 crc kubenswrapper[4911]: I0929 21:41:49.069008 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " pod="openstack/glance-default-internal-api-0" Sep 29 21:41:49 crc kubenswrapper[4911]: I0929 21:41:49.080615 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " pod="openstack/glance-default-external-api-0" Sep 29 21:41:49 crc kubenswrapper[4911]: I0929 21:41:49.274029 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 21:41:49 crc kubenswrapper[4911]: I0929 21:41:49.307746 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 21:41:49 crc kubenswrapper[4911]: I0929 21:41:49.332041 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6" Sep 29 21:41:49 crc kubenswrapper[4911]: I0929 21:41:49.385523 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-8k779"] Sep 29 21:41:49 crc kubenswrapper[4911]: I0929 21:41:49.385816 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-8k779" podUID="b9c4944f-6978-483c-a49c-5e0dc98ca4af" containerName="dnsmasq-dns" containerID="cri-o://878645bc3d2a6004bc5236fab6bfaa6a001dd82afcd3aeb277806c9ff8373a68" gracePeriod=10 Sep 29 21:41:49 crc kubenswrapper[4911]: I0929 21:41:49.820845 4911 scope.go:117] "RemoveContainer" containerID="f84f85bad22b5931fa7ebec02b12867d5371c2428352c5c46866149260e7e9a3" Sep 29 21:41:49 crc kubenswrapper[4911]: I0929 21:41:49.900513 4911 scope.go:117] "RemoveContainer" containerID="ee5127f04a6fd3e9f3b0b8287a9a0795d47ada2a28da441a69eff49828d4d8b8" Sep 29 21:41:49 crc kubenswrapper[4911]: I0929 21:41:49.963129 4911 scope.go:117] "RemoveContainer" containerID="889dc7dfb0c3ee8b10a85ac401a6d676ef090cf983eb14d9b39bdb136b17f066" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.036780 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f317-account-create-mfjgf" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.073731 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-9232-account-create-skzbz" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.088206 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-17b1-account-create-qkj5h" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.150927 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2z92n\" (UniqueName: \"kubernetes.io/projected/bf8be712-8bd6-403d-b709-015e4be795d8-kube-api-access-2z92n\") pod \"bf8be712-8bd6-403d-b709-015e4be795d8\" (UID: \"bf8be712-8bd6-403d-b709-015e4be795d8\") " Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.151149 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84w8g\" (UniqueName: \"kubernetes.io/projected/33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2-kube-api-access-84w8g\") pod \"33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2\" (UID: \"33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2\") " Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.156873 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf8be712-8bd6-403d-b709-015e4be795d8-kube-api-access-2z92n" (OuterVolumeSpecName: "kube-api-access-2z92n") pod "bf8be712-8bd6-403d-b709-015e4be795d8" (UID: "bf8be712-8bd6-403d-b709-015e4be795d8"). InnerVolumeSpecName "kube-api-access-2z92n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.168918 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2-kube-api-access-84w8g" (OuterVolumeSpecName: "kube-api-access-84w8g") pod "33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2" (UID: "33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2"). InnerVolumeSpecName "kube-api-access-84w8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.252520 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86v79\" (UniqueName: \"kubernetes.io/projected/4e462c34-9385-46b1-9707-87944ea13535-kube-api-access-86v79\") pod \"4e462c34-9385-46b1-9707-87944ea13535\" (UID: \"4e462c34-9385-46b1-9707-87944ea13535\") " Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.253110 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84w8g\" (UniqueName: \"kubernetes.io/projected/33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2-kube-api-access-84w8g\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.253132 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2z92n\" (UniqueName: \"kubernetes.io/projected/bf8be712-8bd6-403d-b709-015e4be795d8-kube-api-access-2z92n\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.255530 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e462c34-9385-46b1-9707-87944ea13535-kube-api-access-86v79" (OuterVolumeSpecName: "kube-api-access-86v79") pod "4e462c34-9385-46b1-9707-87944ea13535" (UID: "4e462c34-9385-46b1-9707-87944ea13535"). InnerVolumeSpecName "kube-api-access-86v79". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.354909 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86v79\" (UniqueName: \"kubernetes.io/projected/4e462c34-9385-46b1-9707-87944ea13535-kube-api-access-86v79\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.392874 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vkxxs"] Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.459707 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 21:41:50 crc kubenswrapper[4911]: W0929 21:41:50.469146 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba309870_1c87_422f_93c3_81e704ee754e.slice/crio-e72b896eb112745aa52f575128e79d4b4f2e335fee7b8ec49ed07be101e8d452 WatchSource:0}: Error finding container e72b896eb112745aa52f575128e79d4b4f2e335fee7b8ec49ed07be101e8d452: Status 404 returned error can't find the container with id e72b896eb112745aa52f575128e79d4b4f2e335fee7b8ec49ed07be101e8d452 Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.576989 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d12bb9a1-941b-41be-8cb9-3f274e27e497","Type":"ContainerStarted","Data":"97309d3a86a8f19a58eed97212075dc0bbcda623387345233bfabbf6436f474c"} Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.581246 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f317-account-create-mfjgf" event={"ID":"bf8be712-8bd6-403d-b709-015e4be795d8","Type":"ContainerDied","Data":"45c2191a9f1146b88c82d473029759133be71335e1982179b07a4343eb5ef1a5"} Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.581319 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f317-account-create-mfjgf" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.581330 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45c2191a9f1146b88c82d473029759133be71335e1982179b07a4343eb5ef1a5" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.590756 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ba309870-1c87-422f-93c3-81e704ee754e","Type":"ContainerStarted","Data":"e72b896eb112745aa52f575128e79d4b4f2e335fee7b8ec49ed07be101e8d452"} Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.592605 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-9232-account-create-skzbz" event={"ID":"33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2","Type":"ContainerDied","Data":"91cf735ba7e13739891d239109542393cd76e2abb70e0262655b5ec4871e27b1"} Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.592644 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91cf735ba7e13739891d239109542393cd76e2abb70e0262655b5ec4871e27b1" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.592732 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-9232-account-create-skzbz" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.600373 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vkxxs" event={"ID":"8099e715-8ed5-40c3-9e20-1a2e873b867a","Type":"ContainerStarted","Data":"4ecb54cb9b450b1b5decc8f810c9fcb5b73fb2590f3dae198fb3302521c32647"} Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.622999 4911 generic.go:334] "Generic (PLEG): container finished" podID="b9c4944f-6978-483c-a49c-5e0dc98ca4af" containerID="878645bc3d2a6004bc5236fab6bfaa6a001dd82afcd3aeb277806c9ff8373a68" exitCode=0 Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.623081 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-8k779" event={"ID":"b9c4944f-6978-483c-a49c-5e0dc98ca4af","Type":"ContainerDied","Data":"878645bc3d2a6004bc5236fab6bfaa6a001dd82afcd3aeb277806c9ff8373a68"} Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.634950 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-17b1-account-create-qkj5h" event={"ID":"4e462c34-9385-46b1-9707-87944ea13535","Type":"ContainerDied","Data":"e6b56eff651ca235c4e49b37bbf3da0b6481574c0e7d60b97951473e5d4913b3"} Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.634992 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6b56eff651ca235c4e49b37bbf3da0b6481574c0e7d60b97951473e5d4913b3" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.635073 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-17b1-account-create-qkj5h" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.663731 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.696240 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-vkxxs" podStartSLOduration=2.69620479 podStartE2EDuration="2.69620479s" podCreationTimestamp="2025-09-29 21:41:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:41:50.631090011 +0000 UTC m=+988.608202682" watchObservedRunningTime="2025-09-29 21:41:50.69620479 +0000 UTC m=+988.673317481" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.766634 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-dns-svc\") pod \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.766806 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f59hd\" (UniqueName: \"kubernetes.io/projected/b9c4944f-6978-483c-a49c-5e0dc98ca4af-kube-api-access-f59hd\") pod \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.766890 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-dns-swift-storage-0\") pod \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.766946 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-ovsdbserver-sb\") pod \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.767707 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-config\") pod \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.767778 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-ovsdbserver-nb\") pod \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\" (UID: \"b9c4944f-6978-483c-a49c-5e0dc98ca4af\") " Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.774049 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9c4944f-6978-483c-a49c-5e0dc98ca4af-kube-api-access-f59hd" (OuterVolumeSpecName: "kube-api-access-f59hd") pod "b9c4944f-6978-483c-a49c-5e0dc98ca4af" (UID: "b9c4944f-6978-483c-a49c-5e0dc98ca4af"). InnerVolumeSpecName "kube-api-access-f59hd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.816182 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b9c4944f-6978-483c-a49c-5e0dc98ca4af" (UID: "b9c4944f-6978-483c-a49c-5e0dc98ca4af"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.817826 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-config" (OuterVolumeSpecName: "config") pod "b9c4944f-6978-483c-a49c-5e0dc98ca4af" (UID: "b9c4944f-6978-483c-a49c-5e0dc98ca4af"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.818370 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b9c4944f-6978-483c-a49c-5e0dc98ca4af" (UID: "b9c4944f-6978-483c-a49c-5e0dc98ca4af"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.825563 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b9c4944f-6978-483c-a49c-5e0dc98ca4af" (UID: "b9c4944f-6978-483c-a49c-5e0dc98ca4af"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.842327 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b9c4944f-6978-483c-a49c-5e0dc98ca4af" (UID: "b9c4944f-6978-483c-a49c-5e0dc98ca4af"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.870953 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.871173 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.871185 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.871194 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f59hd\" (UniqueName: \"kubernetes.io/projected/b9c4944f-6978-483c-a49c-5e0dc98ca4af-kube-api-access-f59hd\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.871203 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:50 crc kubenswrapper[4911]: I0929 21:41:50.871212 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b9c4944f-6978-483c-a49c-5e0dc98ca4af-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:51 crc kubenswrapper[4911]: I0929 21:41:51.404611 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 21:41:51 crc kubenswrapper[4911]: W0929 21:41:51.422878 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77d2a08f_5a1a_4847_81a8_a160afadf6aa.slice/crio-e1cce8a028b7709bc7edb400625fbe77faa4a1bf4156bbecdc2cf0bff187f1be WatchSource:0}: Error finding container e1cce8a028b7709bc7edb400625fbe77faa4a1bf4156bbecdc2cf0bff187f1be: Status 404 returned error can't find the container with id e1cce8a028b7709bc7edb400625fbe77faa4a1bf4156bbecdc2cf0bff187f1be Sep 29 21:41:51 crc kubenswrapper[4911]: I0929 21:41:51.646625 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"77d2a08f-5a1a-4847-81a8-a160afadf6aa","Type":"ContainerStarted","Data":"e1cce8a028b7709bc7edb400625fbe77faa4a1bf4156bbecdc2cf0bff187f1be"} Sep 29 21:41:51 crc kubenswrapper[4911]: I0929 21:41:51.651178 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-8k779" event={"ID":"b9c4944f-6978-483c-a49c-5e0dc98ca4af","Type":"ContainerDied","Data":"898b8200a2af3a45dfcfd4f34021ee799d9eb67b3eddfd4cc244f0f66559517d"} Sep 29 21:41:51 crc kubenswrapper[4911]: I0929 21:41:51.651230 4911 scope.go:117] "RemoveContainer" containerID="878645bc3d2a6004bc5236fab6bfaa6a001dd82afcd3aeb277806c9ff8373a68" Sep 29 21:41:51 crc kubenswrapper[4911]: I0929 21:41:51.651387 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-8k779" Sep 29 21:41:51 crc kubenswrapper[4911]: I0929 21:41:51.659467 4911 generic.go:334] "Generic (PLEG): container finished" podID="373766c8-b8c6-4f57-b43b-24667ddb9564" containerID="134d7d482dcdb7c997c96a6bd81e94c04338e5dd41ccc582689b9d3a127edb17" exitCode=0 Sep 29 21:41:51 crc kubenswrapper[4911]: I0929 21:41:51.659555 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xpfbw" event={"ID":"373766c8-b8c6-4f57-b43b-24667ddb9564","Type":"ContainerDied","Data":"134d7d482dcdb7c997c96a6bd81e94c04338e5dd41ccc582689b9d3a127edb17"} Sep 29 21:41:51 crc kubenswrapper[4911]: I0929 21:41:51.663921 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vkxxs" event={"ID":"8099e715-8ed5-40c3-9e20-1a2e873b867a","Type":"ContainerStarted","Data":"6ef83c83bb9f3e1ac135fccde8835799fb03907f0a39ceaacdd8b5b817560052"} Sep 29 21:41:51 crc kubenswrapper[4911]: I0929 21:41:51.667338 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ba309870-1c87-422f-93c3-81e704ee754e","Type":"ContainerStarted","Data":"019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0"} Sep 29 21:41:51 crc kubenswrapper[4911]: I0929 21:41:51.703184 4911 scope.go:117] "RemoveContainer" containerID="380a388c68c9704fb9f5b6f4f56b098df39fa3c57273adbc8f71b7202e793c9b" Sep 29 21:41:51 crc kubenswrapper[4911]: I0929 21:41:51.717363 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-8k779"] Sep 29 21:41:51 crc kubenswrapper[4911]: I0929 21:41:51.731652 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-8k779"] Sep 29 21:41:52 crc kubenswrapper[4911]: I0929 21:41:52.680978 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ba309870-1c87-422f-93c3-81e704ee754e","Type":"ContainerStarted","Data":"9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6"} Sep 29 21:41:52 crc kubenswrapper[4911]: I0929 21:41:52.683523 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"77d2a08f-5a1a-4847-81a8-a160afadf6aa","Type":"ContainerStarted","Data":"a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f"} Sep 29 21:41:52 crc kubenswrapper[4911]: I0929 21:41:52.709869 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.709849136 podStartE2EDuration="4.709849136s" podCreationTimestamp="2025-09-29 21:41:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:41:52.70096488 +0000 UTC m=+990.678077561" watchObservedRunningTime="2025-09-29 21:41:52.709849136 +0000 UTC m=+990.686961807" Sep 29 21:41:52 crc kubenswrapper[4911]: I0929 21:41:52.720506 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9c4944f-6978-483c-a49c-5e0dc98ca4af" path="/var/lib/kubelet/pods/b9c4944f-6978-483c-a49c-5e0dc98ca4af/volumes" Sep 29 21:41:53 crc kubenswrapper[4911]: I0929 21:41:53.715035 4911 generic.go:334] "Generic (PLEG): container finished" podID="8099e715-8ed5-40c3-9e20-1a2e873b867a" containerID="6ef83c83bb9f3e1ac135fccde8835799fb03907f0a39ceaacdd8b5b817560052" exitCode=0 Sep 29 21:41:53 crc kubenswrapper[4911]: I0929 21:41:53.715269 4911 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vkxxs" event={"ID":"8099e715-8ed5-40c3-9e20-1a2e873b867a","Type":"ContainerDied","Data":"6ef83c83bb9f3e1ac135fccde8835799fb03907f0a39ceaacdd8b5b817560052"} Sep 29 21:41:53 crc kubenswrapper[4911]: I0929 21:41:53.720733 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"77d2a08f-5a1a-4847-81a8-a160afadf6aa","Type":"ContainerStarted","Data":"611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2"} Sep 29 21:41:53 crc kubenswrapper[4911]: I0929 21:41:53.765398 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.765373564 podStartE2EDuration="5.765373564s" podCreationTimestamp="2025-09-29 21:41:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:41:53.759843162 +0000 UTC m=+991.736955853" watchObservedRunningTime="2025-09-29 21:41:53.765373564 +0000 UTC m=+991.742486265" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.960706 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-m4r9b"] Sep 29 21:41:54 crc kubenswrapper[4911]: E0929 21:41:54.961851 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf8be712-8bd6-403d-b709-015e4be795d8" containerName="mariadb-account-create" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.961865 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf8be712-8bd6-403d-b709-015e4be795d8" containerName="mariadb-account-create" Sep 29 21:41:54 crc kubenswrapper[4911]: E0929 21:41:54.961897 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e462c34-9385-46b1-9707-87944ea13535" containerName="mariadb-account-create" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.961993 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e462c34-9385-46b1-9707-87944ea13535" containerName="mariadb-account-create" Sep 29 21:41:54 crc kubenswrapper[4911]: E0929 21:41:54.962014 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2" containerName="mariadb-account-create" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.962021 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2" containerName="mariadb-account-create" Sep 29 21:41:54 crc kubenswrapper[4911]: E0929 21:41:54.962036 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9c4944f-6978-483c-a49c-5e0dc98ca4af" containerName="init" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.962043 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9c4944f-6978-483c-a49c-5e0dc98ca4af" containerName="init" Sep 29 21:41:54 crc kubenswrapper[4911]: E0929 21:41:54.962050 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9c4944f-6978-483c-a49c-5e0dc98ca4af" containerName="dnsmasq-dns" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.962056 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9c4944f-6978-483c-a49c-5e0dc98ca4af" containerName="dnsmasq-dns" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.962478 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2" containerName="mariadb-account-create" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.962522 4911 
memory_manager.go:354] "RemoveStaleState removing state" podUID="4e462c34-9385-46b1-9707-87944ea13535" containerName="mariadb-account-create" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.962536 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9c4944f-6978-483c-a49c-5e0dc98ca4af" containerName="dnsmasq-dns" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.962545 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf8be712-8bd6-403d-b709-015e4be795d8" containerName="mariadb-account-create" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.963170 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.965662 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.965735 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.965887 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-sh6vk" Sep 29 21:41:54 crc kubenswrapper[4911]: I0929 21:41:54.979421 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-m4r9b"] Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.035757 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xpfbw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.055073 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-config-data\") pod \"373766c8-b8c6-4f57-b43b-24667ddb9564\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.055129 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxq2g\" (UniqueName: \"kubernetes.io/projected/373766c8-b8c6-4f57-b43b-24667ddb9564-kube-api-access-zxq2g\") pod \"373766c8-b8c6-4f57-b43b-24667ddb9564\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.055148 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/373766c8-b8c6-4f57-b43b-24667ddb9564-logs\") pod \"373766c8-b8c6-4f57-b43b-24667ddb9564\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.055216 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-scripts\") pod \"373766c8-b8c6-4f57-b43b-24667ddb9564\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.055234 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-combined-ca-bundle\") pod \"373766c8-b8c6-4f57-b43b-24667ddb9564\" (UID: \"373766c8-b8c6-4f57-b43b-24667ddb9564\") " Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.055411 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ptsx\" (UniqueName: 
\"kubernetes.io/projected/3b39a884-4bfd-4927-af16-6ce025d131fc-kube-api-access-4ptsx\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.055439 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-config-data\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.055490 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-scripts\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.055531 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b39a884-4bfd-4927-af16-6ce025d131fc-etc-machine-id\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.055556 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-combined-ca-bundle\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.055578 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-db-sync-config-data\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.063652 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/373766c8-b8c6-4f57-b43b-24667ddb9564-kube-api-access-zxq2g" (OuterVolumeSpecName: "kube-api-access-zxq2g") pod "373766c8-b8c6-4f57-b43b-24667ddb9564" (UID: "373766c8-b8c6-4f57-b43b-24667ddb9564"). InnerVolumeSpecName "kube-api-access-zxq2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.064300 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/373766c8-b8c6-4f57-b43b-24667ddb9564-logs" (OuterVolumeSpecName: "logs") pod "373766c8-b8c6-4f57-b43b-24667ddb9564" (UID: "373766c8-b8c6-4f57-b43b-24667ddb9564"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.067059 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-scripts" (OuterVolumeSpecName: "scripts") pod "373766c8-b8c6-4f57-b43b-24667ddb9564" (UID: "373766c8-b8c6-4f57-b43b-24667ddb9564"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.070908 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.097439 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "373766c8-b8c6-4f57-b43b-24667ddb9564" (UID: "373766c8-b8c6-4f57-b43b-24667ddb9564"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.118679 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-config-data" (OuterVolumeSpecName: "config-data") pod "373766c8-b8c6-4f57-b43b-24667ddb9564" (UID: "373766c8-b8c6-4f57-b43b-24667ddb9564"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.156379 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-config-data\") pod \"8099e715-8ed5-40c3-9e20-1a2e873b867a\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.156460 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-scripts\") pod \"8099e715-8ed5-40c3-9e20-1a2e873b867a\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.156489 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-credential-keys\") pod \"8099e715-8ed5-40c3-9e20-1a2e873b867a\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.156591 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtxk9\" (UniqueName: \"kubernetes.io/projected/8099e715-8ed5-40c3-9e20-1a2e873b867a-kube-api-access-jtxk9\") pod \"8099e715-8ed5-40c3-9e20-1a2e873b867a\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.156627 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-combined-ca-bundle\") pod \"8099e715-8ed5-40c3-9e20-1a2e873b867a\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.156660 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-fernet-keys\") pod \"8099e715-8ed5-40c3-9e20-1a2e873b867a\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.156881 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ptsx\" (UniqueName: \"kubernetes.io/projected/3b39a884-4bfd-4927-af16-6ce025d131fc-kube-api-access-4ptsx\") pod \"cinder-db-sync-m4r9b\" (UID: 
\"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.156908 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-config-data\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.156971 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-scripts\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.157013 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b39a884-4bfd-4927-af16-6ce025d131fc-etc-machine-id\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.157041 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-combined-ca-bundle\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.157062 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-db-sync-config-data\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.157135 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.157151 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxq2g\" (UniqueName: \"kubernetes.io/projected/373766c8-b8c6-4f57-b43b-24667ddb9564-kube-api-access-zxq2g\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.157162 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/373766c8-b8c6-4f57-b43b-24667ddb9564-logs\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.157170 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.157178 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/373766c8-b8c6-4f57-b43b-24667ddb9564-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.157605 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b39a884-4bfd-4927-af16-6ce025d131fc-etc-machine-id\") pod \"cinder-db-sync-m4r9b\" (UID: 
\"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.160270 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8099e715-8ed5-40c3-9e20-1a2e873b867a" (UID: "8099e715-8ed5-40c3-9e20-1a2e873b867a"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.166389 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-scripts" (OuterVolumeSpecName: "scripts") pod "8099e715-8ed5-40c3-9e20-1a2e873b867a" (UID: "8099e715-8ed5-40c3-9e20-1a2e873b867a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.166432 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8099e715-8ed5-40c3-9e20-1a2e873b867a" (UID: "8099e715-8ed5-40c3-9e20-1a2e873b867a"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.167346 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-config-data\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.179924 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-combined-ca-bundle\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.180520 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8099e715-8ed5-40c3-9e20-1a2e873b867a-kube-api-access-jtxk9" (OuterVolumeSpecName: "kube-api-access-jtxk9") pod "8099e715-8ed5-40c3-9e20-1a2e873b867a" (UID: "8099e715-8ed5-40c3-9e20-1a2e873b867a"). InnerVolumeSpecName "kube-api-access-jtxk9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.184011 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ptsx\" (UniqueName: \"kubernetes.io/projected/3b39a884-4bfd-4927-af16-6ce025d131fc-kube-api-access-4ptsx\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.185351 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-scripts\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.186521 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-db-sync-config-data\") pod \"cinder-db-sync-m4r9b\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.190519 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:41:55 crc kubenswrapper[4911]: E0929 21:41:55.202526 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-config-data podName:8099e715-8ed5-40c3-9e20-1a2e873b867a nodeName:}" failed. No retries permitted until 2025-09-29 21:41:55.702497927 +0000 UTC m=+993.679610598 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-config-data") pod "8099e715-8ed5-40c3-9e20-1a2e873b867a" (UID: "8099e715-8ed5-40c3-9e20-1a2e873b867a") : error deleting /var/lib/kubelet/pods/8099e715-8ed5-40c3-9e20-1a2e873b867a/volume-subpaths: remove /var/lib/kubelet/pods/8099e715-8ed5-40c3-9e20-1a2e873b867a/volume-subpaths: no such file or directory Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.205065 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8099e715-8ed5-40c3-9e20-1a2e873b867a" (UID: "8099e715-8ed5-40c3-9e20-1a2e873b867a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.246430 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-rm86n"] Sep 29 21:41:55 crc kubenswrapper[4911]: E0929 21:41:55.246778 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8099e715-8ed5-40c3-9e20-1a2e873b867a" containerName="keystone-bootstrap" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.246813 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8099e715-8ed5-40c3-9e20-1a2e873b867a" containerName="keystone-bootstrap" Sep 29 21:41:55 crc kubenswrapper[4911]: E0929 21:41:55.246836 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="373766c8-b8c6-4f57-b43b-24667ddb9564" containerName="placement-db-sync" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.246844 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="373766c8-b8c6-4f57-b43b-24667ddb9564" containerName="placement-db-sync" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.247117 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="373766c8-b8c6-4f57-b43b-24667ddb9564" containerName="placement-db-sync" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.247140 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="8099e715-8ed5-40c3-9e20-1a2e873b867a" containerName="keystone-bootstrap" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.247810 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-rm86n" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.251100 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.251297 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-v2jnd" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.257312 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-rm86n"] Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.258364 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b23e82-069b-48d1-b154-5280fea52947-combined-ca-bundle\") pod \"barbican-db-sync-rm86n\" (UID: \"67b23e82-069b-48d1-b154-5280fea52947\") " pod="openstack/barbican-db-sync-rm86n" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.258418 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/67b23e82-069b-48d1-b154-5280fea52947-db-sync-config-data\") pod \"barbican-db-sync-rm86n\" (UID: \"67b23e82-069b-48d1-b154-5280fea52947\") " pod="openstack/barbican-db-sync-rm86n" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.258635 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lj9lm\" (UniqueName: \"kubernetes.io/projected/67b23e82-069b-48d1-b154-5280fea52947-kube-api-access-lj9lm\") pod \"barbican-db-sync-rm86n\" (UID: \"67b23e82-069b-48d1-b154-5280fea52947\") " pod="openstack/barbican-db-sync-rm86n" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.258909 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtxk9\" (UniqueName: 
\"kubernetes.io/projected/8099e715-8ed5-40c3-9e20-1a2e873b867a-kube-api-access-jtxk9\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.258932 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.258946 4911 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.258958 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.258969 4911 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.360601 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b23e82-069b-48d1-b154-5280fea52947-combined-ca-bundle\") pod \"barbican-db-sync-rm86n\" (UID: \"67b23e82-069b-48d1-b154-5280fea52947\") " pod="openstack/barbican-db-sync-rm86n" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.360657 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/67b23e82-069b-48d1-b154-5280fea52947-db-sync-config-data\") pod \"barbican-db-sync-rm86n\" (UID: \"67b23e82-069b-48d1-b154-5280fea52947\") " pod="openstack/barbican-db-sync-rm86n" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.360709 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lj9lm\" (UniqueName: \"kubernetes.io/projected/67b23e82-069b-48d1-b154-5280fea52947-kube-api-access-lj9lm\") pod \"barbican-db-sync-rm86n\" (UID: \"67b23e82-069b-48d1-b154-5280fea52947\") " pod="openstack/barbican-db-sync-rm86n" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.365922 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b23e82-069b-48d1-b154-5280fea52947-combined-ca-bundle\") pod \"barbican-db-sync-rm86n\" (UID: \"67b23e82-069b-48d1-b154-5280fea52947\") " pod="openstack/barbican-db-sync-rm86n" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.366529 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/67b23e82-069b-48d1-b154-5280fea52947-db-sync-config-data\") pod \"barbican-db-sync-rm86n\" (UID: \"67b23e82-069b-48d1-b154-5280fea52947\") " pod="openstack/barbican-db-sync-rm86n" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.377244 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lj9lm\" (UniqueName: \"kubernetes.io/projected/67b23e82-069b-48d1-b154-5280fea52947-kube-api-access-lj9lm\") pod \"barbican-db-sync-rm86n\" (UID: \"67b23e82-069b-48d1-b154-5280fea52947\") " pod="openstack/barbican-db-sync-rm86n" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.450957 4911 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-nt89p"] Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.452085 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-nt89p" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.454203 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-4twj6" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.455069 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.455585 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.461959 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-nt89p"] Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.462617 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84edca47-0d64-4494-b7a9-84712fb6515b-combined-ca-bundle\") pod \"neutron-db-sync-nt89p\" (UID: \"84edca47-0d64-4494-b7a9-84712fb6515b\") " pod="openstack/neutron-db-sync-nt89p" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.462700 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sxwg\" (UniqueName: \"kubernetes.io/projected/84edca47-0d64-4494-b7a9-84712fb6515b-kube-api-access-5sxwg\") pod \"neutron-db-sync-nt89p\" (UID: \"84edca47-0d64-4494-b7a9-84712fb6515b\") " pod="openstack/neutron-db-sync-nt89p" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.462759 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/84edca47-0d64-4494-b7a9-84712fb6515b-config\") pod \"neutron-db-sync-nt89p\" (UID: \"84edca47-0d64-4494-b7a9-84712fb6515b\") " pod="openstack/neutron-db-sync-nt89p" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.565159 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84edca47-0d64-4494-b7a9-84712fb6515b-combined-ca-bundle\") pod \"neutron-db-sync-nt89p\" (UID: \"84edca47-0d64-4494-b7a9-84712fb6515b\") " pod="openstack/neutron-db-sync-nt89p" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.565441 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sxwg\" (UniqueName: \"kubernetes.io/projected/84edca47-0d64-4494-b7a9-84712fb6515b-kube-api-access-5sxwg\") pod \"neutron-db-sync-nt89p\" (UID: \"84edca47-0d64-4494-b7a9-84712fb6515b\") " pod="openstack/neutron-db-sync-nt89p" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.565470 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/84edca47-0d64-4494-b7a9-84712fb6515b-config\") pod \"neutron-db-sync-nt89p\" (UID: \"84edca47-0d64-4494-b7a9-84712fb6515b\") " pod="openstack/neutron-db-sync-nt89p" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.570941 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/84edca47-0d64-4494-b7a9-84712fb6515b-config\") pod \"neutron-db-sync-nt89p\" (UID: \"84edca47-0d64-4494-b7a9-84712fb6515b\") " 
pod="openstack/neutron-db-sync-nt89p" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.571941 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84edca47-0d64-4494-b7a9-84712fb6515b-combined-ca-bundle\") pod \"neutron-db-sync-nt89p\" (UID: \"84edca47-0d64-4494-b7a9-84712fb6515b\") " pod="openstack/neutron-db-sync-nt89p" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.593044 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-rm86n" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.597976 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sxwg\" (UniqueName: \"kubernetes.io/projected/84edca47-0d64-4494-b7a9-84712fb6515b-kube-api-access-5sxwg\") pod \"neutron-db-sync-nt89p\" (UID: \"84edca47-0d64-4494-b7a9-84712fb6515b\") " pod="openstack/neutron-db-sync-nt89p" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.671593 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-m4r9b"] Sep 29 21:41:55 crc kubenswrapper[4911]: W0929 21:41:55.679025 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b39a884_4bfd_4927_af16_6ce025d131fc.slice/crio-05f8d100d16ebda6b16163bd410a9a45ca832799f040959f4736c3d46147db4a WatchSource:0}: Error finding container 05f8d100d16ebda6b16163bd410a9a45ca832799f040959f4736c3d46147db4a: Status 404 returned error can't find the container with id 05f8d100d16ebda6b16163bd410a9a45ca832799f040959f4736c3d46147db4a Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.765447 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xpfbw" event={"ID":"373766c8-b8c6-4f57-b43b-24667ddb9564","Type":"ContainerDied","Data":"db8661e0457b5029728442896b9db088b3fe459e14c8c996911a9647436ddc32"} Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.765485 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db8661e0457b5029728442896b9db088b3fe459e14c8c996911a9647436ddc32" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.765565 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xpfbw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.772441 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-config-data\") pod \"8099e715-8ed5-40c3-9e20-1a2e873b867a\" (UID: \"8099e715-8ed5-40c3-9e20-1a2e873b867a\") " Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.777074 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-nt89p" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.778902 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-config-data" (OuterVolumeSpecName: "config-data") pod "8099e715-8ed5-40c3-9e20-1a2e873b867a" (UID: "8099e715-8ed5-40c3-9e20-1a2e873b867a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.779042 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d12bb9a1-941b-41be-8cb9-3f274e27e497","Type":"ContainerStarted","Data":"0f44a99bfb5ab8fe5bae18f0f945ff2763187103c64299f9ff28ffa2da00b061"} Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.786591 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vkxxs" event={"ID":"8099e715-8ed5-40c3-9e20-1a2e873b867a","Type":"ContainerDied","Data":"4ecb54cb9b450b1b5decc8f810c9fcb5b73fb2590f3dae198fb3302521c32647"} Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.786640 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ecb54cb9b450b1b5decc8f810c9fcb5b73fb2590f3dae198fb3302521c32647" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.786719 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vkxxs" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.817890 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-m4r9b" event={"ID":"3b39a884-4bfd-4927-af16-6ce025d131fc","Type":"ContainerStarted","Data":"05f8d100d16ebda6b16163bd410a9a45ca832799f040959f4736c3d46147db4a"} Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.847857 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7fbf6b8688-rgddw"] Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.848938 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.853684 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.853706 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.853980 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-bg252" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.854129 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7fbf6b8688-rgddw"] Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.854221 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.854351 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.854589 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.875123 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-scripts\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.875443 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-config-data\") pod \"keystone-7fbf6b8688-rgddw\" 
(UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.875465 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-internal-tls-certs\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.875499 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-combined-ca-bundle\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.875517 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-credential-keys\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.875552 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkwxj\" (UniqueName: \"kubernetes.io/projected/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-kube-api-access-vkwxj\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.875608 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-fernet-keys\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.875641 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-public-tls-certs\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.875711 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8099e715-8ed5-40c3-9e20-1a2e873b867a-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.978555 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-config-data\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.978585 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-internal-tls-certs\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 
21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.978614 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-credential-keys\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.978629 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-combined-ca-bundle\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.978664 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkwxj\" (UniqueName: \"kubernetes.io/projected/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-kube-api-access-vkwxj\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.978713 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-fernet-keys\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.978741 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-public-tls-certs\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.979454 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-scripts\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.983828 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-internal-tls-certs\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.984668 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-scripts\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.986707 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-public-tls-certs\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.990169 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" 
(UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-credential-keys\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.992161 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-combined-ca-bundle\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:55 crc kubenswrapper[4911]: I0929 21:41:55.994101 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-fernet-keys\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.002330 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-config-data\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.015309 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkwxj\" (UniqueName: \"kubernetes.io/projected/0aeb1728-fd0e-46cd-ba53-8cba740a66ee-kube-api-access-vkwxj\") pod \"keystone-7fbf6b8688-rgddw\" (UID: \"0aeb1728-fd0e-46cd-ba53-8cba740a66ee\") " pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.121268 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-65555f7b56-th6vc"] Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.122509 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.124569 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.124814 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.125597 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.126456 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-6hf8s" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.134172 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.142600 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-65555f7b56-th6vc"] Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.167782 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.168428 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-rm86n"] Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.183285 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6qv9\" (UniqueName: \"kubernetes.io/projected/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-kube-api-access-c6qv9\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.183328 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-scripts\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.183357 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-combined-ca-bundle\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.183374 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-internal-tls-certs\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.183404 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-public-tls-certs\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.183473 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-logs\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.183518 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-config-data\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.270982 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-nt89p"] Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.284858 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-config-data\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " 
pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.285164 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6qv9\" (UniqueName: \"kubernetes.io/projected/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-kube-api-access-c6qv9\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.285189 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-scripts\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.285232 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-combined-ca-bundle\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.285251 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-internal-tls-certs\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.285295 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-public-tls-certs\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.285376 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-logs\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.285913 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-logs\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.289915 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-combined-ca-bundle\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.293099 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-scripts\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.293309 4911 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-public-tls-certs\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.293429 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-internal-tls-certs\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.293742 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-config-data\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.301401 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6qv9\" (UniqueName: \"kubernetes.io/projected/00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1-kube-api-access-c6qv9\") pod \"placement-65555f7b56-th6vc\" (UID: \"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1\") " pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.437652 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.593809 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7fbf6b8688-rgddw"] Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.832160 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7fbf6b8688-rgddw" event={"ID":"0aeb1728-fd0e-46cd-ba53-8cba740a66ee","Type":"ContainerStarted","Data":"e469800f00ec2445e1525479667bfa2ee9f3cc3eeb78a81457e58fdcce9741c7"} Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.834691 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nt89p" event={"ID":"84edca47-0d64-4494-b7a9-84712fb6515b","Type":"ContainerStarted","Data":"9005c2a1ba04eeb6a6c36c68be084652f844eebc07be3aac6ad5f0dab442de10"} Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.834721 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nt89p" event={"ID":"84edca47-0d64-4494-b7a9-84712fb6515b","Type":"ContainerStarted","Data":"69bd4600686afdb69a55ecede86732d270b14bc4b373f5466b8ba6f8d6d09cb1"} Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.837608 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-rm86n" event={"ID":"67b23e82-069b-48d1-b154-5280fea52947","Type":"ContainerStarted","Data":"5c6faa451a34c766ae764ee21f2fa5fce46741ae7f7592890b0d7e2bf4af14ae"} Sep 29 21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.857924 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-nt89p" podStartSLOduration=1.857904902 podStartE2EDuration="1.857904902s" podCreationTimestamp="2025-09-29 21:41:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:41:56.847936752 +0000 UTC m=+994.825049433" watchObservedRunningTime="2025-09-29 21:41:56.857904902 +0000 UTC m=+994.835017573" Sep 29 
21:41:56 crc kubenswrapper[4911]: I0929 21:41:56.908705 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-65555f7b56-th6vc"] Sep 29 21:41:56 crc kubenswrapper[4911]: W0929 21:41:56.925829 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d05bc5_6a5d_4a65_8afa_8d9ea429e2c1.slice/crio-1575b51223e2b6023881a1ebd59bc87931bc57d64a74066a00144a225e2266d4 WatchSource:0}: Error finding container 1575b51223e2b6023881a1ebd59bc87931bc57d64a74066a00144a225e2266d4: Status 404 returned error can't find the container with id 1575b51223e2b6023881a1ebd59bc87931bc57d64a74066a00144a225e2266d4 Sep 29 21:41:57 crc kubenswrapper[4911]: I0929 21:41:57.847982 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-65555f7b56-th6vc" event={"ID":"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1","Type":"ContainerStarted","Data":"1575b51223e2b6023881a1ebd59bc87931bc57d64a74066a00144a225e2266d4"} Sep 29 21:41:57 crc kubenswrapper[4911]: I0929 21:41:57.852212 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7fbf6b8688-rgddw" event={"ID":"0aeb1728-fd0e-46cd-ba53-8cba740a66ee","Type":"ContainerStarted","Data":"e0eb028de58b1a6cff5ae3ee50f790603bbd77e0d94bc360da32906fdd8d76b3"} Sep 29 21:41:57 crc kubenswrapper[4911]: I0929 21:41:57.852503 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:41:57 crc kubenswrapper[4911]: I0929 21:41:57.888317 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7fbf6b8688-rgddw" podStartSLOduration=2.88829771 podStartE2EDuration="2.88829771s" podCreationTimestamp="2025-09-29 21:41:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:41:57.869855098 +0000 UTC m=+995.846967769" watchObservedRunningTime="2025-09-29 21:41:57.88829771 +0000 UTC m=+995.865410382" Sep 29 21:41:59 crc kubenswrapper[4911]: I0929 21:41:59.275336 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 21:41:59 crc kubenswrapper[4911]: I0929 21:41:59.275391 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 21:41:59 crc kubenswrapper[4911]: I0929 21:41:59.306190 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 21:41:59 crc kubenswrapper[4911]: I0929 21:41:59.308680 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 21:41:59 crc kubenswrapper[4911]: I0929 21:41:59.308741 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 21:41:59 crc kubenswrapper[4911]: I0929 21:41:59.351829 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 21:41:59 crc kubenswrapper[4911]: I0929 21:41:59.355425 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 21:41:59 crc kubenswrapper[4911]: I0929 21:41:59.379949 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 21:41:59 crc 
kubenswrapper[4911]: I0929 21:41:59.870479 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-65555f7b56-th6vc" event={"ID":"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1","Type":"ContainerStarted","Data":"bdcb8e1fbb182462886b7655619fd6b17c84f0de8b0b66502941b78286d1bb6b"} Sep 29 21:41:59 crc kubenswrapper[4911]: I0929 21:41:59.870984 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 21:41:59 crc kubenswrapper[4911]: I0929 21:41:59.871020 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 21:41:59 crc kubenswrapper[4911]: I0929 21:41:59.871036 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 21:41:59 crc kubenswrapper[4911]: I0929 21:41:59.871121 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 21:42:01 crc kubenswrapper[4911]: I0929 21:42:01.920375 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 21:42:01 crc kubenswrapper[4911]: I0929 21:42:01.920686 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 21:42:01 crc kubenswrapper[4911]: I0929 21:42:01.928399 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 21:42:02 crc kubenswrapper[4911]: I0929 21:42:02.024166 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 21:42:02 crc kubenswrapper[4911]: I0929 21:42:02.024357 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 21:42:02 crc kubenswrapper[4911]: I0929 21:42:02.033122 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 21:42:14 crc kubenswrapper[4911]: I0929 21:42:14.038714 4911 generic.go:334] "Generic (PLEG): container finished" podID="84edca47-0d64-4494-b7a9-84712fb6515b" containerID="9005c2a1ba04eeb6a6c36c68be084652f844eebc07be3aac6ad5f0dab442de10" exitCode=0 Sep 29 21:42:14 crc kubenswrapper[4911]: I0929 21:42:14.038837 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nt89p" event={"ID":"84edca47-0d64-4494-b7a9-84712fb6515b","Type":"ContainerDied","Data":"9005c2a1ba04eeb6a6c36c68be084652f844eebc07be3aac6ad5f0dab442de10"} Sep 29 21:42:14 crc kubenswrapper[4911]: E0929 21:42:14.268214 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Sep 29 21:42:14 crc kubenswrapper[4911]: E0929 21:42:14.268664 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4ptsx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-m4r9b_openstack(3b39a884-4bfd-4927-af16-6ce025d131fc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 21:42:14 crc kubenswrapper[4911]: E0929 21:42:14.269985 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-m4r9b" podUID="3b39a884-4bfd-4927-af16-6ce025d131fc" Sep 29 21:42:15 crc kubenswrapper[4911]: E0929 21:42:15.048562 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-m4r9b" podUID="3b39a884-4bfd-4927-af16-6ce025d131fc" Sep 29 21:42:15 crc kubenswrapper[4911]: E0929 21:42:15.232187 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24:latest" Sep 29 21:42:15 crc kubenswrapper[4911]: E0929 21:42:15.232500 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24:latest,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wtsmx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(d12bb9a1-941b-41be-8cb9-3f274e27e497): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 21:42:15 crc kubenswrapper[4911]: E0929 21:42:15.233719 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" Sep 29 21:42:15 crc kubenswrapper[4911]: I0929 21:42:15.376018 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-nt89p" Sep 29 21:42:15 crc kubenswrapper[4911]: I0929 21:42:15.441457 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5sxwg\" (UniqueName: \"kubernetes.io/projected/84edca47-0d64-4494-b7a9-84712fb6515b-kube-api-access-5sxwg\") pod \"84edca47-0d64-4494-b7a9-84712fb6515b\" (UID: \"84edca47-0d64-4494-b7a9-84712fb6515b\") " Sep 29 21:42:15 crc kubenswrapper[4911]: I0929 21:42:15.441676 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84edca47-0d64-4494-b7a9-84712fb6515b-combined-ca-bundle\") pod \"84edca47-0d64-4494-b7a9-84712fb6515b\" (UID: \"84edca47-0d64-4494-b7a9-84712fb6515b\") " Sep 29 21:42:15 crc kubenswrapper[4911]: I0929 21:42:15.441821 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/84edca47-0d64-4494-b7a9-84712fb6515b-config\") pod \"84edca47-0d64-4494-b7a9-84712fb6515b\" (UID: \"84edca47-0d64-4494-b7a9-84712fb6515b\") " Sep 29 21:42:15 crc kubenswrapper[4911]: I0929 21:42:15.448674 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84edca47-0d64-4494-b7a9-84712fb6515b-kube-api-access-5sxwg" (OuterVolumeSpecName: "kube-api-access-5sxwg") pod "84edca47-0d64-4494-b7a9-84712fb6515b" (UID: "84edca47-0d64-4494-b7a9-84712fb6515b"). InnerVolumeSpecName "kube-api-access-5sxwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:15 crc kubenswrapper[4911]: I0929 21:42:15.469955 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84edca47-0d64-4494-b7a9-84712fb6515b-config" (OuterVolumeSpecName: "config") pod "84edca47-0d64-4494-b7a9-84712fb6515b" (UID: "84edca47-0d64-4494-b7a9-84712fb6515b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:15 crc kubenswrapper[4911]: I0929 21:42:15.474974 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84edca47-0d64-4494-b7a9-84712fb6515b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "84edca47-0d64-4494-b7a9-84712fb6515b" (UID: "84edca47-0d64-4494-b7a9-84712fb6515b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:15 crc kubenswrapper[4911]: I0929 21:42:15.543853 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5sxwg\" (UniqueName: \"kubernetes.io/projected/84edca47-0d64-4494-b7a9-84712fb6515b-kube-api-access-5sxwg\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:15 crc kubenswrapper[4911]: I0929 21:42:15.543899 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84edca47-0d64-4494-b7a9-84712fb6515b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:15 crc kubenswrapper[4911]: I0929 21:42:15.543911 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/84edca47-0d64-4494-b7a9-84712fb6515b-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.062748 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-65555f7b56-th6vc" event={"ID":"00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1","Type":"ContainerStarted","Data":"0e3078fe8567ec8b951c84eff1c709b13096dc683a7b3cfe090b4d220c9f0a13"} Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.066218 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.069101 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.077250 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-nt89p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.077547 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nt89p" event={"ID":"84edca47-0d64-4494-b7a9-84712fb6515b","Type":"ContainerDied","Data":"69bd4600686afdb69a55ecede86732d270b14bc4b373f5466b8ba6f8d6d09cb1"} Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.077779 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69bd4600686afdb69a55ecede86732d270b14bc4b373f5466b8ba6f8d6d09cb1" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.080186 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerName="ceilometer-central-agent" containerID="cri-o://939ba4c9fca017dbacc3b9007aca336c5c5ecae178afe623cae8639a7efbffe5" gracePeriod=30 Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.081046 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerName="ceilometer-notification-agent" containerID="cri-o://97309d3a86a8f19a58eed97212075dc0bbcda623387345233bfabbf6436f474c" gracePeriod=30 Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.081442 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerName="sg-core" containerID="cri-o://0f44a99bfb5ab8fe5bae18f0f945ff2763187103c64299f9ff28ffa2da00b061" gracePeriod=30 Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.081704 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-rm86n" 
event={"ID":"67b23e82-069b-48d1-b154-5280fea52947","Type":"ContainerStarted","Data":"9c345e5b5cb1fe927e5550e3460d26da1aee54bbf084b90aaf78c7d478d45088"} Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.118273 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-65555f7b56-th6vc" podStartSLOduration=20.11823743 podStartE2EDuration="20.11823743s" podCreationTimestamp="2025-09-29 21:41:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:42:16.106933188 +0000 UTC m=+1014.084045889" watchObservedRunningTime="2025-09-29 21:42:16.11823743 +0000 UTC m=+1014.095350141" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.143669 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-rm86n" podStartSLOduration=2.104738487 podStartE2EDuration="21.143643977s" podCreationTimestamp="2025-09-29 21:41:55 +0000 UTC" firstStartedPulling="2025-09-29 21:41:56.18369449 +0000 UTC m=+994.160807161" lastFinishedPulling="2025-09-29 21:42:15.22259997 +0000 UTC m=+1013.199712651" observedRunningTime="2025-09-29 21:42:16.128712784 +0000 UTC m=+1014.105825465" watchObservedRunningTime="2025-09-29 21:42:16.143643977 +0000 UTC m=+1014.120756678" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.273810 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-9c895"] Sep 29 21:42:16 crc kubenswrapper[4911]: E0929 21:42:16.274271 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84edca47-0d64-4494-b7a9-84712fb6515b" containerName="neutron-db-sync" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.274296 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="84edca47-0d64-4494-b7a9-84712fb6515b" containerName="neutron-db-sync" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.274545 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="84edca47-0d64-4494-b7a9-84712fb6515b" containerName="neutron-db-sync" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.275659 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.301079 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-9c895"] Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.358475 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.358529 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-config\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.358557 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.358713 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.358834 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.358980 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prdl8\" (UniqueName: \"kubernetes.io/projected/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-kube-api-access-prdl8\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.410989 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5c685495c6-r4w7p"] Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.412286 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.414775 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.415580 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-4twj6" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.416562 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.417150 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.435023 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5c685495c6-r4w7p"] Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.473256 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-config\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.473329 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.473372 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-ovndb-tls-certs\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.473391 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-httpd-config\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.473429 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.473459 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-config\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.473493 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " 
pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.473579 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prdl8\" (UniqueName: \"kubernetes.io/projected/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-kube-api-access-prdl8\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.473614 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d77k\" (UniqueName: \"kubernetes.io/projected/98530233-359f-4be3-a540-20553e9cbe30-kube-api-access-4d77k\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.473671 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-combined-ca-bundle\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.473725 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.474654 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.475503 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-config\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.476270 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.477232 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.478038 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 
21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.503800 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prdl8\" (UniqueName: \"kubernetes.io/projected/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-kube-api-access-prdl8\") pod \"dnsmasq-dns-84b966f6c9-9c895\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.575486 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-combined-ca-bundle\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.575770 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-httpd-config\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.575792 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-ovndb-tls-certs\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.575837 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-config\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.575908 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d77k\" (UniqueName: \"kubernetes.io/projected/98530233-359f-4be3-a540-20553e9cbe30-kube-api-access-4d77k\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.581063 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-ovndb-tls-certs\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.581663 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-combined-ca-bundle\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.583462 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-httpd-config\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.593520 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-config\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.595433 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d77k\" (UniqueName: \"kubernetes.io/projected/98530233-359f-4be3-a540-20553e9cbe30-kube-api-access-4d77k\") pod \"neutron-5c685495c6-r4w7p\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.595800 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:16 crc kubenswrapper[4911]: I0929 21:42:16.728584 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:17 crc kubenswrapper[4911]: I0929 21:42:17.047149 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-9c895"] Sep 29 21:42:17 crc kubenswrapper[4911]: I0929 21:42:17.092597 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" event={"ID":"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e","Type":"ContainerStarted","Data":"167bd3bb23ce3adb7d17dc8f178b8c16527f35438e3fac01613db119c872157a"} Sep 29 21:42:17 crc kubenswrapper[4911]: I0929 21:42:17.096756 4911 generic.go:334] "Generic (PLEG): container finished" podID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerID="0f44a99bfb5ab8fe5bae18f0f945ff2763187103c64299f9ff28ffa2da00b061" exitCode=2 Sep 29 21:42:17 crc kubenswrapper[4911]: I0929 21:42:17.096810 4911 generic.go:334] "Generic (PLEG): container finished" podID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerID="939ba4c9fca017dbacc3b9007aca336c5c5ecae178afe623cae8639a7efbffe5" exitCode=0 Sep 29 21:42:17 crc kubenswrapper[4911]: I0929 21:42:17.097874 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d12bb9a1-941b-41be-8cb9-3f274e27e497","Type":"ContainerDied","Data":"0f44a99bfb5ab8fe5bae18f0f945ff2763187103c64299f9ff28ffa2da00b061"} Sep 29 21:42:17 crc kubenswrapper[4911]: I0929 21:42:17.097919 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d12bb9a1-941b-41be-8cb9-3f274e27e497","Type":"ContainerDied","Data":"939ba4c9fca017dbacc3b9007aca336c5c5ecae178afe623cae8639a7efbffe5"} Sep 29 21:42:17 crc kubenswrapper[4911]: I0929 21:42:17.284149 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5c685495c6-r4w7p"] Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.106610 4911 generic.go:334] "Generic (PLEG): container finished" podID="dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" containerID="87b5dfa51e94bc45d7087f6e2480e0871e1ce6e7ed90b1078d3bb6628c569153" exitCode=0 Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.106824 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" event={"ID":"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e","Type":"ContainerDied","Data":"87b5dfa51e94bc45d7087f6e2480e0871e1ce6e7ed90b1078d3bb6628c569153"} Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.116647 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.118213 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-5c685495c6-r4w7p" event={"ID":"98530233-359f-4be3-a540-20553e9cbe30","Type":"ContainerStarted","Data":"92dbffd1527696cbd6e1a9801e0678cf1afeda537edb5f16bbf5f17d2583dc27"} Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.118254 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c685495c6-r4w7p" event={"ID":"98530233-359f-4be3-a540-20553e9cbe30","Type":"ContainerStarted","Data":"6fe50da28db4566ab5ceab012dc52bf51b78d4bee83b05f89a556e984c261ccc"} Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.118277 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.118294 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c685495c6-r4w7p" event={"ID":"98530233-359f-4be3-a540-20553e9cbe30","Type":"ContainerStarted","Data":"75fcaa37844611e956c014d3e83e2c7e8b7d9b7fe235c061621d376be35b8fe7"} Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.168328 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5c685495c6-r4w7p" podStartSLOduration=2.168306885 podStartE2EDuration="2.168306885s" podCreationTimestamp="2025-09-29 21:42:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:42:18.157060776 +0000 UTC m=+1016.134173467" watchObservedRunningTime="2025-09-29 21:42:18.168306885 +0000 UTC m=+1016.145419556" Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.930385 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6b4bcd6f7-dzn4f"] Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.932336 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.935127 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.935417 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Sep 29 21:42:18 crc kubenswrapper[4911]: I0929 21:42:18.967199 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b4bcd6f7-dzn4f"] Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.018859 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-config\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.018954 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-combined-ca-bundle\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.019002 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4w2v\" (UniqueName: \"kubernetes.io/projected/0920ca6c-fcb6-466b-9a0e-099dde91d938-kube-api-access-r4w2v\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.019054 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-httpd-config\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.019082 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-public-tls-certs\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.019202 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-internal-tls-certs\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.019322 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-ovndb-tls-certs\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.124403 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-config\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.124477 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-combined-ca-bundle\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.124516 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4w2v\" (UniqueName: \"kubernetes.io/projected/0920ca6c-fcb6-466b-9a0e-099dde91d938-kube-api-access-r4w2v\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.124550 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-httpd-config\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.124570 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-public-tls-certs\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.124612 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-internal-tls-certs\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.124637 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-ovndb-tls-certs\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.130357 4911 generic.go:334] "Generic (PLEG): container finished" podID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerID="97309d3a86a8f19a58eed97212075dc0bbcda623387345233bfabbf6436f474c" exitCode=0 Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.130457 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d12bb9a1-941b-41be-8cb9-3f274e27e497","Type":"ContainerDied","Data":"97309d3a86a8f19a58eed97212075dc0bbcda623387345233bfabbf6436f474c"} Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.130572 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-internal-tls-certs\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.130666 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-public-tls-certs\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.131910 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-combined-ca-bundle\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.132617 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-config\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.133871 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-httpd-config\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.140915 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0920ca6c-fcb6-466b-9a0e-099dde91d938-ovndb-tls-certs\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.143456 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4w2v\" (UniqueName: \"kubernetes.io/projected/0920ca6c-fcb6-466b-9a0e-099dde91d938-kube-api-access-r4w2v\") pod \"neutron-6b4bcd6f7-dzn4f\" (UID: \"0920ca6c-fcb6-466b-9a0e-099dde91d938\") " pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.262422 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:19 crc kubenswrapper[4911]: I0929 21:42:19.838885 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b4bcd6f7-dzn4f"] Sep 29 21:42:19 crc kubenswrapper[4911]: W0929 21:42:19.839797 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0920ca6c_fcb6_466b_9a0e_099dde91d938.slice/crio-77fc81d87af5e11927f9bf2fd289430af9a581d80e9bceeca99577262353b54a WatchSource:0}: Error finding container 77fc81d87af5e11927f9bf2fd289430af9a581d80e9bceeca99577262353b54a: Status 404 returned error can't find the container with id 77fc81d87af5e11927f9bf2fd289430af9a581d80e9bceeca99577262353b54a Sep 29 21:42:20 crc kubenswrapper[4911]: I0929 21:42:20.142705 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b4bcd6f7-dzn4f" event={"ID":"0920ca6c-fcb6-466b-9a0e-099dde91d938","Type":"ContainerStarted","Data":"77fc81d87af5e11927f9bf2fd289430af9a581d80e9bceeca99577262353b54a"} Sep 29 21:42:20 crc kubenswrapper[4911]: I0929 21:42:20.786413 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:42:21 crc kubenswrapper[4911]: I0929 21:42:21.155472 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" event={"ID":"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e","Type":"ContainerStarted","Data":"bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78"} Sep 29 21:42:21 crc kubenswrapper[4911]: I0929 21:42:21.869422 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-65555f7b56-th6vc" Sep 29 21:42:21 crc kubenswrapper[4911]: I0929 21:42:21.946449 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.076114 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtsmx\" (UniqueName: \"kubernetes.io/projected/d12bb9a1-941b-41be-8cb9-3f274e27e497-kube-api-access-wtsmx\") pod \"d12bb9a1-941b-41be-8cb9-3f274e27e497\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.076219 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-scripts\") pod \"d12bb9a1-941b-41be-8cb9-3f274e27e497\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.076272 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-sg-core-conf-yaml\") pod \"d12bb9a1-941b-41be-8cb9-3f274e27e497\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.076305 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-combined-ca-bundle\") pod \"d12bb9a1-941b-41be-8cb9-3f274e27e497\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.076327 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-config-data\") pod \"d12bb9a1-941b-41be-8cb9-3f274e27e497\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.076371 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d12bb9a1-941b-41be-8cb9-3f274e27e497-log-httpd\") pod \"d12bb9a1-941b-41be-8cb9-3f274e27e497\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.076676 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d12bb9a1-941b-41be-8cb9-3f274e27e497-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d12bb9a1-941b-41be-8cb9-3f274e27e497" (UID: "d12bb9a1-941b-41be-8cb9-3f274e27e497"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.076750 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d12bb9a1-941b-41be-8cb9-3f274e27e497-run-httpd\") pod \"d12bb9a1-941b-41be-8cb9-3f274e27e497\" (UID: \"d12bb9a1-941b-41be-8cb9-3f274e27e497\") " Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.076940 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d12bb9a1-941b-41be-8cb9-3f274e27e497-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d12bb9a1-941b-41be-8cb9-3f274e27e497" (UID: "d12bb9a1-941b-41be-8cb9-3f274e27e497"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.077160 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d12bb9a1-941b-41be-8cb9-3f274e27e497-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.077177 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d12bb9a1-941b-41be-8cb9-3f274e27e497-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.091228 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-scripts" (OuterVolumeSpecName: "scripts") pod "d12bb9a1-941b-41be-8cb9-3f274e27e497" (UID: "d12bb9a1-941b-41be-8cb9-3f274e27e497"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.092940 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d12bb9a1-941b-41be-8cb9-3f274e27e497-kube-api-access-wtsmx" (OuterVolumeSpecName: "kube-api-access-wtsmx") pod "d12bb9a1-941b-41be-8cb9-3f274e27e497" (UID: "d12bb9a1-941b-41be-8cb9-3f274e27e497"). InnerVolumeSpecName "kube-api-access-wtsmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.104675 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d12bb9a1-941b-41be-8cb9-3f274e27e497" (UID: "d12bb9a1-941b-41be-8cb9-3f274e27e497"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.124966 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d12bb9a1-941b-41be-8cb9-3f274e27e497" (UID: "d12bb9a1-941b-41be-8cb9-3f274e27e497"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.183142 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtsmx\" (UniqueName: \"kubernetes.io/projected/d12bb9a1-941b-41be-8cb9-3f274e27e497-kube-api-access-wtsmx\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.183483 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.183495 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.183507 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.187034 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-config-data" (OuterVolumeSpecName: "config-data") pod "d12bb9a1-941b-41be-8cb9-3f274e27e497" (UID: "d12bb9a1-941b-41be-8cb9-3f274e27e497"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.192823 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d12bb9a1-941b-41be-8cb9-3f274e27e497","Type":"ContainerDied","Data":"e54da3d79eb60932376baf4a890797757bc83cb6cadc19db255b8a652bbce6c4"} Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.192881 4911 scope.go:117] "RemoveContainer" containerID="0f44a99bfb5ab8fe5bae18f0f945ff2763187103c64299f9ff28ffa2da00b061" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.193044 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.200386 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b4bcd6f7-dzn4f" event={"ID":"0920ca6c-fcb6-466b-9a0e-099dde91d938","Type":"ContainerStarted","Data":"61744889055f810be2790cfdc25654899b3a1aae78f0279319e6fe7c07f42823"} Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.200433 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b4bcd6f7-dzn4f" event={"ID":"0920ca6c-fcb6-466b-9a0e-099dde91d938","Type":"ContainerStarted","Data":"456cfcde11f910964a406a9dbde4945322449d614950d362cf3ee3ae0e41a0ba"} Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.200763 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.200885 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.216550 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6b4bcd6f7-dzn4f" podStartSLOduration=4.216535135 podStartE2EDuration="4.216535135s" podCreationTimestamp="2025-09-29 21:42:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:42:22.216109852 +0000 UTC m=+1020.193222533" watchObservedRunningTime="2025-09-29 21:42:22.216535135 +0000 UTC m=+1020.193647816" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.222188 4911 scope.go:117] "RemoveContainer" containerID="97309d3a86a8f19a58eed97212075dc0bbcda623387345233bfabbf6436f474c" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.248338 4911 scope.go:117] "RemoveContainer" containerID="939ba4c9fca017dbacc3b9007aca336c5c5ecae178afe623cae8639a7efbffe5" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.251634 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" podStartSLOduration=6.251614753 podStartE2EDuration="6.251614753s" podCreationTimestamp="2025-09-29 21:42:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:42:22.240166328 +0000 UTC m=+1020.217279019" watchObservedRunningTime="2025-09-29 21:42:22.251614753 +0000 UTC m=+1020.228727424" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.277843 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.285692 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d12bb9a1-941b-41be-8cb9-3f274e27e497-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.287104 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.294712 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:22 crc kubenswrapper[4911]: E0929 21:42:22.295092 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerName="ceilometer-central-agent" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.295108 4911 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerName="ceilometer-central-agent" Sep 29 21:42:22 crc kubenswrapper[4911]: E0929 21:42:22.295139 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerName="sg-core" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.295144 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerName="sg-core" Sep 29 21:42:22 crc kubenswrapper[4911]: E0929 21:42:22.295158 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerName="ceilometer-notification-agent" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.295164 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerName="ceilometer-notification-agent" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.295314 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerName="ceilometer-notification-agent" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.295331 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerName="sg-core" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.295337 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" containerName="ceilometer-central-agent" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.296720 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.303166 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.303456 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.337860 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.386885 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-run-httpd\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.386965 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxp7f\" (UniqueName: \"kubernetes.io/projected/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-kube-api-access-dxp7f\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.386987 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-config-data\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.387026 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-log-httpd\") 
pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.387131 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-scripts\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.387176 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.387200 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.488758 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxp7f\" (UniqueName: \"kubernetes.io/projected/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-kube-api-access-dxp7f\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.488840 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-config-data\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.488867 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-log-httpd\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.488913 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-scripts\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.488949 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.488965 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.489013 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-run-httpd\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.489546 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-run-httpd\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.489830 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-log-httpd\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.495442 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-config-data\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.496111 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.500206 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.503992 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-scripts\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.522299 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxp7f\" (UniqueName: \"kubernetes.io/projected/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-kube-api-access-dxp7f\") pod \"ceilometer-0\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.618900 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:42:22 crc kubenswrapper[4911]: I0929 21:42:22.748858 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d12bb9a1-941b-41be-8cb9-3f274e27e497" path="/var/lib/kubelet/pods/d12bb9a1-941b-41be-8cb9-3f274e27e497/volumes" Sep 29 21:42:23 crc kubenswrapper[4911]: I0929 21:42:23.194849 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:24 crc kubenswrapper[4911]: I0929 21:42:24.221088 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4","Type":"ContainerStarted","Data":"2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367"} Sep 29 21:42:24 crc kubenswrapper[4911]: I0929 21:42:24.221503 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4","Type":"ContainerStarted","Data":"2eed44f67217e187e8f570440b9ac579d6cdcb39fb319e787e4bc432681e9b9c"} Sep 29 21:42:24 crc kubenswrapper[4911]: I0929 21:42:24.240419 4911 generic.go:334] "Generic (PLEG): container finished" podID="67b23e82-069b-48d1-b154-5280fea52947" containerID="9c345e5b5cb1fe927e5550e3460d26da1aee54bbf084b90aaf78c7d478d45088" exitCode=0 Sep 29 21:42:24 crc kubenswrapper[4911]: I0929 21:42:24.240589 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-rm86n" event={"ID":"67b23e82-069b-48d1-b154-5280fea52947","Type":"ContainerDied","Data":"9c345e5b5cb1fe927e5550e3460d26da1aee54bbf084b90aaf78c7d478d45088"} Sep 29 21:42:25 crc kubenswrapper[4911]: I0929 21:42:25.250016 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4","Type":"ContainerStarted","Data":"8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304"} Sep 29 21:42:25 crc kubenswrapper[4911]: I0929 21:42:25.612992 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-rm86n" Sep 29 21:42:25 crc kubenswrapper[4911]: I0929 21:42:25.660997 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/67b23e82-069b-48d1-b154-5280fea52947-db-sync-config-data\") pod \"67b23e82-069b-48d1-b154-5280fea52947\" (UID: \"67b23e82-069b-48d1-b154-5280fea52947\") " Sep 29 21:42:25 crc kubenswrapper[4911]: I0929 21:42:25.661201 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lj9lm\" (UniqueName: \"kubernetes.io/projected/67b23e82-069b-48d1-b154-5280fea52947-kube-api-access-lj9lm\") pod \"67b23e82-069b-48d1-b154-5280fea52947\" (UID: \"67b23e82-069b-48d1-b154-5280fea52947\") " Sep 29 21:42:25 crc kubenswrapper[4911]: I0929 21:42:25.661266 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b23e82-069b-48d1-b154-5280fea52947-combined-ca-bundle\") pod \"67b23e82-069b-48d1-b154-5280fea52947\" (UID: \"67b23e82-069b-48d1-b154-5280fea52947\") " Sep 29 21:42:25 crc kubenswrapper[4911]: I0929 21:42:25.671565 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67b23e82-069b-48d1-b154-5280fea52947-kube-api-access-lj9lm" (OuterVolumeSpecName: "kube-api-access-lj9lm") pod "67b23e82-069b-48d1-b154-5280fea52947" (UID: "67b23e82-069b-48d1-b154-5280fea52947"). 
InnerVolumeSpecName "kube-api-access-lj9lm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:25 crc kubenswrapper[4911]: I0929 21:42:25.678973 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67b23e82-069b-48d1-b154-5280fea52947-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "67b23e82-069b-48d1-b154-5280fea52947" (UID: "67b23e82-069b-48d1-b154-5280fea52947"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:25 crc kubenswrapper[4911]: I0929 21:42:25.700243 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67b23e82-069b-48d1-b154-5280fea52947-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "67b23e82-069b-48d1-b154-5280fea52947" (UID: "67b23e82-069b-48d1-b154-5280fea52947"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:25 crc kubenswrapper[4911]: I0929 21:42:25.763860 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lj9lm\" (UniqueName: \"kubernetes.io/projected/67b23e82-069b-48d1-b154-5280fea52947-kube-api-access-lj9lm\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:25 crc kubenswrapper[4911]: I0929 21:42:25.764018 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b23e82-069b-48d1-b154-5280fea52947-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:25 crc kubenswrapper[4911]: I0929 21:42:25.766069 4911 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/67b23e82-069b-48d1-b154-5280fea52947-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.258828 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4","Type":"ContainerStarted","Data":"41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e"} Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.261714 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-rm86n" event={"ID":"67b23e82-069b-48d1-b154-5280fea52947","Type":"ContainerDied","Data":"5c6faa451a34c766ae764ee21f2fa5fce46741ae7f7592890b0d7e2bf4af14ae"} Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.261856 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c6faa451a34c766ae764ee21f2fa5fce46741ae7f7592890b0d7e2bf4af14ae" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.261928 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-rm86n" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.535070 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-844ff9b7c7-s8vcn"] Sep 29 21:42:26 crc kubenswrapper[4911]: E0929 21:42:26.535434 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67b23e82-069b-48d1-b154-5280fea52947" containerName="barbican-db-sync" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.535450 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="67b23e82-069b-48d1-b154-5280fea52947" containerName="barbican-db-sync" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.535630 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="67b23e82-069b-48d1-b154-5280fea52947" containerName="barbican-db-sync" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.538546 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.543717 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-v2jnd" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.544013 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.544208 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.551330 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-844ff9b7c7-s8vcn"] Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.582975 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4fpr\" (UniqueName: \"kubernetes.io/projected/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-kube-api-access-t4fpr\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.583065 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-logs\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.583117 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-combined-ca-bundle\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.583391 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-config-data\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.583455 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" 
(UniqueName: \"kubernetes.io/secret/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-config-data-custom\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.599519 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.647634 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5d79d56b48-nd52h"] Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.649315 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.651173 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.685159 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4fpr\" (UniqueName: \"kubernetes.io/projected/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-kube-api-access-t4fpr\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.685307 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwljv\" (UniqueName: \"kubernetes.io/projected/1fee9300-cab0-441a-a47f-a5a1fc02c24d-kube-api-access-zwljv\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.685424 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-logs\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.685612 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-combined-ca-bundle\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.685707 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fee9300-cab0-441a-a47f-a5a1fc02c24d-logs\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.685895 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fee9300-cab0-441a-a47f-a5a1fc02c24d-config-data\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.686047 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-config-data\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.686356 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-logs\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.692500 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1fee9300-cab0-441a-a47f-a5a1fc02c24d-config-data-custom\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.697498 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5d79d56b48-nd52h"] Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.713943 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-config-data-custom\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.714040 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fee9300-cab0-441a-a47f-a5a1fc02c24d-combined-ca-bundle\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.715549 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-config-data\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.725720 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-combined-ca-bundle\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.726396 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4fpr\" (UniqueName: \"kubernetes.io/projected/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-kube-api-access-t4fpr\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.728052 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/115e92e2-b175-4f4b-a9fe-cdd0b7d3d104-config-data-custom\") pod \"barbican-worker-844ff9b7c7-s8vcn\" (UID: \"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104\") " pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.778991 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-9c895"] Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.779220 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-jqmrp"] Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.780498 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.801031 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-jqmrp"] Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.820184 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fee9300-cab0-441a-a47f-a5a1fc02c24d-combined-ca-bundle\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.820278 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwljv\" (UniqueName: \"kubernetes.io/projected/1fee9300-cab0-441a-a47f-a5a1fc02c24d-kube-api-access-zwljv\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.820322 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fee9300-cab0-441a-a47f-a5a1fc02c24d-logs\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.820373 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fee9300-cab0-441a-a47f-a5a1fc02c24d-config-data\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.820431 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1fee9300-cab0-441a-a47f-a5a1fc02c24d-config-data-custom\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.822076 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fee9300-cab0-441a-a47f-a5a1fc02c24d-logs\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.822853 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-79b7f696f8-zxrwh"] Sep 29 21:42:26 
crc kubenswrapper[4911]: I0929 21:42:26.824237 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.826335 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1fee9300-cab0-441a-a47f-a5a1fc02c24d-config-data-custom\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.826563 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.831288 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fee9300-cab0-441a-a47f-a5a1fc02c24d-combined-ca-bundle\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.836209 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-79b7f696f8-zxrwh"] Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.837141 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fee9300-cab0-441a-a47f-a5a1fc02c24d-config-data\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.840896 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwljv\" (UniqueName: \"kubernetes.io/projected/1fee9300-cab0-441a-a47f-a5a1fc02c24d-kube-api-access-zwljv\") pod \"barbican-keystone-listener-5d79d56b48-nd52h\" (UID: \"1fee9300-cab0-441a-a47f-a5a1fc02c24d\") " pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.921785 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.921842 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr4b2\" (UniqueName: \"kubernetes.io/projected/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-kube-api-access-sr4b2\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.921880 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40533d70-c10c-4b0e-b870-f50e423ccdc5-logs\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.922109 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-config-data\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.922161 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.922203 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-config\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.922256 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.922317 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-combined-ca-bundle\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.922368 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-config-data-custom\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.922409 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flp5w\" (UniqueName: \"kubernetes.io/projected/40533d70-c10c-4b0e-b870-f50e423ccdc5-kube-api-access-flp5w\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.922485 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.974142 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-844ff9b7c7-s8vcn" Sep 29 21:42:26 crc kubenswrapper[4911]: I0929 21:42:26.990005 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.023668 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40533d70-c10c-4b0e-b870-f50e423ccdc5-logs\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.023756 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-config-data\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.023783 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.023829 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-config\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.023851 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.023873 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-combined-ca-bundle\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.023897 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-config-data-custom\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.023922 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flp5w\" (UniqueName: \"kubernetes.io/projected/40533d70-c10c-4b0e-b870-f50e423ccdc5-kube-api-access-flp5w\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.023954 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " 
pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.023978 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.023994 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr4b2\" (UniqueName: \"kubernetes.io/projected/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-kube-api-access-sr4b2\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.024181 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40533d70-c10c-4b0e-b870-f50e423ccdc5-logs\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.026478 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.026479 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.026573 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-config\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.026949 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.028422 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-combined-ca-bundle\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.029620 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-config-data-custom\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.031948 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.034727 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-config-data\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.041007 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flp5w\" (UniqueName: \"kubernetes.io/projected/40533d70-c10c-4b0e-b870-f50e423ccdc5-kube-api-access-flp5w\") pod \"barbican-api-79b7f696f8-zxrwh\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.042064 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr4b2\" (UniqueName: \"kubernetes.io/projected/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-kube-api-access-sr4b2\") pod \"dnsmasq-dns-75c8ddd69c-jqmrp\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.170495 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.195164 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.295129 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" podUID="dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" containerName="dnsmasq-dns" containerID="cri-o://bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78" gracePeriod=10 Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.295558 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4","Type":"ContainerStarted","Data":"23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12"} Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.295848 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.328816 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9602408470000001 podStartE2EDuration="5.328776106s" podCreationTimestamp="2025-09-29 21:42:22 +0000 UTC" firstStartedPulling="2025-09-29 21:42:23.202527676 +0000 UTC m=+1021.179640347" lastFinishedPulling="2025-09-29 21:42:26.571062935 +0000 UTC m=+1024.548175606" observedRunningTime="2025-09-29 21:42:27.321117189 +0000 UTC m=+1025.298229860" watchObservedRunningTime="2025-09-29 21:42:27.328776106 +0000 UTC m=+1025.305888777" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.449528 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-844ff9b7c7-s8vcn"] Sep 29 21:42:27 crc kubenswrapper[4911]: W0929 21:42:27.470322 4911 manager.go:1169] Failed to process 
watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod115e92e2_b175_4f4b_a9fe_cdd0b7d3d104.slice/crio-b3660122bd11ed3e563a1ae1ee373e64983bdd3ccb320c064e189a7547045572 WatchSource:0}: Error finding container b3660122bd11ed3e563a1ae1ee373e64983bdd3ccb320c064e189a7547045572: Status 404 returned error can't find the container with id b3660122bd11ed3e563a1ae1ee373e64983bdd3ccb320c064e189a7547045572 Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.548888 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5d79d56b48-nd52h"] Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.710341 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-jqmrp"] Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.776051 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-79b7f696f8-zxrwh"] Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.862246 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.973127 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prdl8\" (UniqueName: \"kubernetes.io/projected/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-kube-api-access-prdl8\") pod \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.973191 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-ovsdbserver-nb\") pod \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.973214 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-dns-svc\") pod \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.973253 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-ovsdbserver-sb\") pod \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.973277 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-dns-swift-storage-0\") pod \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.973321 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-config\") pod \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\" (UID: \"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e\") " Sep 29 21:42:27 crc kubenswrapper[4911]: I0929 21:42:27.978773 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-kube-api-access-prdl8" (OuterVolumeSpecName: "kube-api-access-prdl8") pod 
"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" (UID: "dd3556aa-f536-44fc-b9b4-ebf1fb064f4e"). InnerVolumeSpecName "kube-api-access-prdl8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.041249 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" (UID: "dd3556aa-f536-44fc-b9b4-ebf1fb064f4e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.045074 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" (UID: "dd3556aa-f536-44fc-b9b4-ebf1fb064f4e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.051244 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" (UID: "dd3556aa-f536-44fc-b9b4-ebf1fb064f4e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.053269 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-config" (OuterVolumeSpecName: "config") pod "dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" (UID: "dd3556aa-f536-44fc-b9b4-ebf1fb064f4e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.058175 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" (UID: "dd3556aa-f536-44fc-b9b4-ebf1fb064f4e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.075388 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prdl8\" (UniqueName: \"kubernetes.io/projected/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-kube-api-access-prdl8\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.075420 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.075431 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.075440 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.075451 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.075460 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.308353 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" event={"ID":"1fee9300-cab0-441a-a47f-a5a1fc02c24d","Type":"ContainerStarted","Data":"92404533fb61166d689e74fae07c49fc6eebb4e956c262ced493fe07ec0ebe41"} Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.312038 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-844ff9b7c7-s8vcn" event={"ID":"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104","Type":"ContainerStarted","Data":"b3660122bd11ed3e563a1ae1ee373e64983bdd3ccb320c064e189a7547045572"} Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.312483 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7fbf6b8688-rgddw" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.317312 4911 generic.go:334] "Generic (PLEG): container finished" podID="7eac87a1-5eb9-48e6-a3f1-f0a78d494674" containerID="ca08e0396e3f4ef3b97f8c84ed29bceb068d9a4f573bdb2d5fb15a986fa77d89" exitCode=0 Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.318261 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" event={"ID":"7eac87a1-5eb9-48e6-a3f1-f0a78d494674","Type":"ContainerDied","Data":"ca08e0396e3f4ef3b97f8c84ed29bceb068d9a4f573bdb2d5fb15a986fa77d89"} Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.318299 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" event={"ID":"7eac87a1-5eb9-48e6-a3f1-f0a78d494674","Type":"ContainerStarted","Data":"ad864286b8614d0b7c510f5f787578fe5fcd421d2a79e1eb388615b9d3c7e3de"} Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.329363 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-79b7f696f8-zxrwh" 
event={"ID":"40533d70-c10c-4b0e-b870-f50e423ccdc5","Type":"ContainerStarted","Data":"d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e"} Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.329405 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-79b7f696f8-zxrwh" event={"ID":"40533d70-c10c-4b0e-b870-f50e423ccdc5","Type":"ContainerStarted","Data":"c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44"} Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.329420 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-79b7f696f8-zxrwh" event={"ID":"40533d70-c10c-4b0e-b870-f50e423ccdc5","Type":"ContainerStarted","Data":"4d0e8f269b8b41e38396156d7783ae0ed622f527967ee7e497ec59fab11ec871"} Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.329452 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.329577 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.344511 4911 generic.go:334] "Generic (PLEG): container finished" podID="dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" containerID="bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78" exitCode=0 Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.344603 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.344624 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" event={"ID":"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e","Type":"ContainerDied","Data":"bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78"} Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.345517 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-9c895" event={"ID":"dd3556aa-f536-44fc-b9b4-ebf1fb064f4e","Type":"ContainerDied","Data":"167bd3bb23ce3adb7d17dc8f178b8c16527f35438e3fac01613db119c872157a"} Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.345537 4911 scope.go:117] "RemoveContainer" containerID="bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.369284 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-79b7f696f8-zxrwh" podStartSLOduration=2.369267038 podStartE2EDuration="2.369267038s" podCreationTimestamp="2025-09-29 21:42:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:42:28.364037906 +0000 UTC m=+1026.341150577" watchObservedRunningTime="2025-09-29 21:42:28.369267038 +0000 UTC m=+1026.346379709" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.380215 4911 scope.go:117] "RemoveContainer" containerID="87b5dfa51e94bc45d7087f6e2480e0871e1ce6e7ed90b1078d3bb6628c569153" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.402775 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-9c895"] Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.410968 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-9c895"] Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.425287 4911 scope.go:117] 
"RemoveContainer" containerID="bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78" Sep 29 21:42:28 crc kubenswrapper[4911]: E0929 21:42:28.426090 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78\": container with ID starting with bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78 not found: ID does not exist" containerID="bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.426195 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78"} err="failed to get container status \"bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78\": rpc error: code = NotFound desc = could not find container \"bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78\": container with ID starting with bd9c5d91244c0801853f294bf67b5856b04e2f3531916a51e5af50e6ff73fb78 not found: ID does not exist" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.426270 4911 scope.go:117] "RemoveContainer" containerID="87b5dfa51e94bc45d7087f6e2480e0871e1ce6e7ed90b1078d3bb6628c569153" Sep 29 21:42:28 crc kubenswrapper[4911]: E0929 21:42:28.426726 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87b5dfa51e94bc45d7087f6e2480e0871e1ce6e7ed90b1078d3bb6628c569153\": container with ID starting with 87b5dfa51e94bc45d7087f6e2480e0871e1ce6e7ed90b1078d3bb6628c569153 not found: ID does not exist" containerID="87b5dfa51e94bc45d7087f6e2480e0871e1ce6e7ed90b1078d3bb6628c569153" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.426774 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87b5dfa51e94bc45d7087f6e2480e0871e1ce6e7ed90b1078d3bb6628c569153"} err="failed to get container status \"87b5dfa51e94bc45d7087f6e2480e0871e1ce6e7ed90b1078d3bb6628c569153\": rpc error: code = NotFound desc = could not find container \"87b5dfa51e94bc45d7087f6e2480e0871e1ce6e7ed90b1078d3bb6628c569153\": container with ID starting with 87b5dfa51e94bc45d7087f6e2480e0871e1ce6e7ed90b1078d3bb6628c569153 not found: ID does not exist" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.620094 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 29 21:42:28 crc kubenswrapper[4911]: E0929 21:42:28.620568 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" containerName="init" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.620581 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" containerName="init" Sep 29 21:42:28 crc kubenswrapper[4911]: E0929 21:42:28.620610 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" containerName="dnsmasq-dns" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.620615 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" containerName="dnsmasq-dns" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.622974 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" containerName="dnsmasq-dns" Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.623553 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.626561 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-gt6k2"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.626951 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.626949 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.649809 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.714770 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd3556aa-f536-44fc-b9b4-ebf1fb064f4e" path="/var/lib/kubelet/pods/dd3556aa-f536-44fc-b9b4-ebf1fb064f4e/volumes"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.787626 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3ca83d01-449e-43fd-a6e3-3a1da30ec45b-openstack-config\") pod \"openstackclient\" (UID: \"3ca83d01-449e-43fd-a6e3-3a1da30ec45b\") " pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.788017 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ca83d01-449e-43fd-a6e3-3a1da30ec45b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3ca83d01-449e-43fd-a6e3-3a1da30ec45b\") " pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.788181 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmmfq\" (UniqueName: \"kubernetes.io/projected/3ca83d01-449e-43fd-a6e3-3a1da30ec45b-kube-api-access-xmmfq\") pod \"openstackclient\" (UID: \"3ca83d01-449e-43fd-a6e3-3a1da30ec45b\") " pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.788284 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3ca83d01-449e-43fd-a6e3-3a1da30ec45b-openstack-config-secret\") pod \"openstackclient\" (UID: \"3ca83d01-449e-43fd-a6e3-3a1da30ec45b\") " pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.891436 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmmfq\" (UniqueName: \"kubernetes.io/projected/3ca83d01-449e-43fd-a6e3-3a1da30ec45b-kube-api-access-xmmfq\") pod \"openstackclient\" (UID: \"3ca83d01-449e-43fd-a6e3-3a1da30ec45b\") " pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.891548 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3ca83d01-449e-43fd-a6e3-3a1da30ec45b-openstack-config-secret\") pod \"openstackclient\" (UID: \"3ca83d01-449e-43fd-a6e3-3a1da30ec45b\") " pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.891630 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3ca83d01-449e-43fd-a6e3-3a1da30ec45b-openstack-config\") pod \"openstackclient\" (UID: \"3ca83d01-449e-43fd-a6e3-3a1da30ec45b\") " pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.891748 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ca83d01-449e-43fd-a6e3-3a1da30ec45b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3ca83d01-449e-43fd-a6e3-3a1da30ec45b\") " pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.892744 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3ca83d01-449e-43fd-a6e3-3a1da30ec45b-openstack-config\") pod \"openstackclient\" (UID: \"3ca83d01-449e-43fd-a6e3-3a1da30ec45b\") " pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.896968 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3ca83d01-449e-43fd-a6e3-3a1da30ec45b-openstack-config-secret\") pod \"openstackclient\" (UID: \"3ca83d01-449e-43fd-a6e3-3a1da30ec45b\") " pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.897565 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ca83d01-449e-43fd-a6e3-3a1da30ec45b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3ca83d01-449e-43fd-a6e3-3a1da30ec45b\") " pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.907428 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmmfq\" (UniqueName: \"kubernetes.io/projected/3ca83d01-449e-43fd-a6e3-3a1da30ec45b-kube-api-access-xmmfq\") pod \"openstackclient\" (UID: \"3ca83d01-449e-43fd-a6e3-3a1da30ec45b\") " pod="openstack/openstackclient"
Sep 29 21:42:28 crc kubenswrapper[4911]: I0929 21:42:28.981058 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.362598 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" event={"ID":"7eac87a1-5eb9-48e6-a3f1-f0a78d494674","Type":"ContainerStarted","Data":"a59ef7cf8d8801d59cdcbd879a6aed303345bcd05fd7048905075c1614d5deeb"}
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.362969 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp"
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.384854 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" podStartSLOduration=3.384835498 podStartE2EDuration="3.384835498s" podCreationTimestamp="2025-09-29 21:42:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:42:29.380666178 +0000 UTC m=+1027.357778869" watchObservedRunningTime="2025-09-29 21:42:29.384835498 +0000 UTC m=+1027.361948179"
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.557657 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7b8dc76f9b-7vhr7"]
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.565004 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7b8dc76f9b-7vhr7"
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.570025 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.570344 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.583551 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7b8dc76f9b-7vhr7"]
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.727987 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-config-data-custom\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7"
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.728363 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-public-tls-certs\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7"
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.728437 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b2e8086-d33d-420a-8dea-5e892d02b5eb-logs\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7"
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.728463 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-config-data\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7"
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.728544 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr6zp\" (UniqueName: \"kubernetes.io/projected/0b2e8086-d33d-420a-8dea-5e892d02b5eb-kube-api-access-lr6zp\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7"
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.728600 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-combined-ca-bundle\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7"
Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.728629 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-internal-tls-certs\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7"
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b2e8086-d33d-420a-8dea-5e892d02b5eb-logs\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.831120 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-config-data\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.831237 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr6zp\" (UniqueName: \"kubernetes.io/projected/0b2e8086-d33d-420a-8dea-5e892d02b5eb-kube-api-access-lr6zp\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.831318 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-combined-ca-bundle\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.831362 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-internal-tls-certs\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.831387 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-config-data-custom\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.831444 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-public-tls-certs\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.832325 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b2e8086-d33d-420a-8dea-5e892d02b5eb-logs\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.837401 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-config-data\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.838017 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-internal-tls-certs\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.838908 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-public-tls-certs\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.839437 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-config-data-custom\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.840080 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b2e8086-d33d-420a-8dea-5e892d02b5eb-combined-ca-bundle\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.849240 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr6zp\" (UniqueName: \"kubernetes.io/projected/0b2e8086-d33d-420a-8dea-5e892d02b5eb-kube-api-access-lr6zp\") pod \"barbican-api-7b8dc76f9b-7vhr7\" (UID: \"0b2e8086-d33d-420a-8dea-5e892d02b5eb\") " pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:29 crc kubenswrapper[4911]: I0929 21:42:29.947734 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:30 crc kubenswrapper[4911]: I0929 21:42:30.068746 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 21:42:30 crc kubenswrapper[4911]: W0929 21:42:30.084542 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ca83d01_449e_43fd_a6e3_3a1da30ec45b.slice/crio-e9bd5a255278f277fb59611f5be379c7a88aacd329b76dc5622acb6edabb2d5f WatchSource:0}: Error finding container e9bd5a255278f277fb59611f5be379c7a88aacd329b76dc5622acb6edabb2d5f: Status 404 returned error can't find the container with id e9bd5a255278f277fb59611f5be379c7a88aacd329b76dc5622acb6edabb2d5f Sep 29 21:42:30 crc kubenswrapper[4911]: I0929 21:42:30.380415 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"3ca83d01-449e-43fd-a6e3-3a1da30ec45b","Type":"ContainerStarted","Data":"e9bd5a255278f277fb59611f5be379c7a88aacd329b76dc5622acb6edabb2d5f"} Sep 29 21:42:30 crc kubenswrapper[4911]: I0929 21:42:30.382668 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" event={"ID":"1fee9300-cab0-441a-a47f-a5a1fc02c24d","Type":"ContainerStarted","Data":"8ce6fd90881f57497afc7b830d198109367054b49d443d66c6dfac9637448a73"} Sep 29 21:42:30 crc kubenswrapper[4911]: I0929 21:42:30.382711 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" event={"ID":"1fee9300-cab0-441a-a47f-a5a1fc02c24d","Type":"ContainerStarted","Data":"4a2d8ea5856cd3e22665617fc35dcfc32dc9f5ccbeb032d8cb44495f04e2a21f"} Sep 29 21:42:30 crc kubenswrapper[4911]: I0929 21:42:30.385971 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-844ff9b7c7-s8vcn" event={"ID":"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104","Type":"ContainerStarted","Data":"4abaf42b31a686696f0f98445ef5ff582c9081ae60189bbc466c559ca59985da"} Sep 29 21:42:30 crc kubenswrapper[4911]: I0929 21:42:30.386007 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-844ff9b7c7-s8vcn" event={"ID":"115e92e2-b175-4f4b-a9fe-cdd0b7d3d104","Type":"ContainerStarted","Data":"fa43a9c85c1260672095b6284ece10bedd7be8a4edd0b5352d2c76f72a929e77"} Sep 29 21:42:30 crc kubenswrapper[4911]: I0929 21:42:30.410758 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5d79d56b48-nd52h" podStartSLOduration=2.398521165 podStartE2EDuration="4.410739156s" podCreationTimestamp="2025-09-29 21:42:26 +0000 UTC" firstStartedPulling="2025-09-29 21:42:27.559613316 +0000 UTC m=+1025.536725987" lastFinishedPulling="2025-09-29 21:42:29.571831307 +0000 UTC m=+1027.548943978" observedRunningTime="2025-09-29 21:42:30.399912752 +0000 UTC m=+1028.377025433" watchObservedRunningTime="2025-09-29 21:42:30.410739156 +0000 UTC m=+1028.387851827" Sep 29 21:42:30 crc kubenswrapper[4911]: I0929 21:42:30.433987 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-844ff9b7c7-s8vcn" podStartSLOduration=2.39832666 podStartE2EDuration="4.433968007s" podCreationTimestamp="2025-09-29 21:42:26 +0000 UTC" firstStartedPulling="2025-09-29 21:42:27.477037345 +0000 UTC m=+1025.454150016" lastFinishedPulling="2025-09-29 21:42:29.512678692 +0000 UTC m=+1027.489791363" observedRunningTime="2025-09-29 21:42:30.425331749 +0000 UTC m=+1028.402444420" 
Sep 29 21:42:30 crc kubenswrapper[4911]: I0929 21:42:30.433987 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-844ff9b7c7-s8vcn" podStartSLOduration=2.39832666 podStartE2EDuration="4.433968007s" podCreationTimestamp="2025-09-29 21:42:26 +0000 UTC" firstStartedPulling="2025-09-29 21:42:27.477037345 +0000 UTC m=+1025.454150016" lastFinishedPulling="2025-09-29 21:42:29.512678692 +0000 UTC m=+1027.489791363" observedRunningTime="2025-09-29 21:42:30.425331749 +0000 UTC m=+1028.402444420" watchObservedRunningTime="2025-09-29 21:42:30.433968007 +0000 UTC m=+1028.411080678"
Sep 29 21:42:30 crc kubenswrapper[4911]: I0929 21:42:30.464112 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7b8dc76f9b-7vhr7"]
Sep 29 21:42:30 crc kubenswrapper[4911]: W0929 21:42:30.467220 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b2e8086_d33d_420a_8dea_5e892d02b5eb.slice/crio-cd62a7063d7b4be498545e3ee9fd9a8f60a7690ca00c0ca766e1bc3d5f8c6b3c WatchSource:0}: Error finding container cd62a7063d7b4be498545e3ee9fd9a8f60a7690ca00c0ca766e1bc3d5f8c6b3c: Status 404 returned error can't find the container with id cd62a7063d7b4be498545e3ee9fd9a8f60a7690ca00c0ca766e1bc3d5f8c6b3c
Sep 29 21:42:31 crc kubenswrapper[4911]: I0929 21:42:31.417191 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b8dc76f9b-7vhr7" event={"ID":"0b2e8086-d33d-420a-8dea-5e892d02b5eb","Type":"ContainerStarted","Data":"adfb4d8aa48f6ba88f6bc85ec5bb8426b9b449b5ea7ef0225301cabab4f819de"}
Sep 29 21:42:31 crc kubenswrapper[4911]: I0929 21:42:31.418231 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b8dc76f9b-7vhr7" event={"ID":"0b2e8086-d33d-420a-8dea-5e892d02b5eb","Type":"ContainerStarted","Data":"fa110f0345592edf73b9e3925d777bf05e343f3053f8d1255fbe0fda68f0d7e8"}
Sep 29 21:42:31 crc kubenswrapper[4911]: I0929 21:42:31.418364 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b8dc76f9b-7vhr7" event={"ID":"0b2e8086-d33d-420a-8dea-5e892d02b5eb","Type":"ContainerStarted","Data":"cd62a7063d7b4be498545e3ee9fd9a8f60a7690ca00c0ca766e1bc3d5f8c6b3c"}
Sep 29 21:42:31 crc kubenswrapper[4911]: I0929 21:42:31.418473 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7b8dc76f9b-7vhr7"
Sep 29 21:42:31 crc kubenswrapper[4911]: I0929 21:42:31.418542 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7b8dc76f9b-7vhr7"
Sep 29 21:42:31 crc kubenswrapper[4911]: I0929 21:42:31.422944 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-m4r9b" event={"ID":"3b39a884-4bfd-4927-af16-6ce025d131fc","Type":"ContainerStarted","Data":"64d76e2842031af1c9210cb8228a1e805574730be2e219ab06de0ff0343b246d"}
Sep 29 21:42:31 crc kubenswrapper[4911]: I0929 21:42:31.443395 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7b8dc76f9b-7vhr7" podStartSLOduration=2.443377715 podStartE2EDuration="2.443377715s" podCreationTimestamp="2025-09-29 21:42:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:42:31.434721307 +0000 UTC m=+1029.411833978" watchObservedRunningTime="2025-09-29 21:42:31.443377715 +0000 UTC m=+1029.420490386"
Sep 29 21:42:31 crc kubenswrapper[4911]: I0929 21:42:31.457975 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-m4r9b" podStartSLOduration=3.6082990759999998 podStartE2EDuration="37.457957237s" podCreationTimestamp="2025-09-29 21:41:54 +0000 UTC" firstStartedPulling="2025-09-29 21:41:55.682083283 +0000 UTC m=+993.659195954" lastFinishedPulling="2025-09-29 21:42:29.531741444 +0000 UTC m=+1027.508854115" observedRunningTime="2025-09-29 21:42:31.452136587 +0000 UTC m=+1029.429249258" watchObservedRunningTime="2025-09-29 21:42:31.457957237 +0000 UTC m=+1029.435069908"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.678150 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-c9f6bd49f-qt4wx"]
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.679773 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.684361 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.684370 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.684466 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.695814 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-c9f6bd49f-qt4wx"]
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.803848 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a0d597e-d509-41b7-839f-3b4b76863ab8-run-httpd\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.803900 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7dvv\" (UniqueName: \"kubernetes.io/projected/5a0d597e-d509-41b7-839f-3b4b76863ab8-kube-api-access-t7dvv\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.803925 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a0d597e-d509-41b7-839f-3b4b76863ab8-config-data\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.803977 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a0d597e-d509-41b7-839f-3b4b76863ab8-log-httpd\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.803992 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a0d597e-d509-41b7-839f-3b4b76863ab8-public-tls-certs\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.804026 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5a0d597e-d509-41b7-839f-3b4b76863ab8-etc-swift\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.804401 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0d597e-d509-41b7-839f-3b4b76863ab8-combined-ca-bundle\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.804492 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a0d597e-d509-41b7-839f-3b4b76863ab8-internal-tls-certs\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.905891 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5a0d597e-d509-41b7-839f-3b4b76863ab8-etc-swift\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.905955 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0d597e-d509-41b7-839f-3b4b76863ab8-combined-ca-bundle\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.905987 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a0d597e-d509-41b7-839f-3b4b76863ab8-internal-tls-certs\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.906061 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a0d597e-d509-41b7-839f-3b4b76863ab8-run-httpd\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.906087 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7dvv\" (UniqueName: \"kubernetes.io/projected/5a0d597e-d509-41b7-839f-3b4b76863ab8-kube-api-access-t7dvv\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.906109 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a0d597e-d509-41b7-839f-3b4b76863ab8-config-data\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.906151 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a0d597e-d509-41b7-839f-3b4b76863ab8-log-httpd\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx"
"operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a0d597e-d509-41b7-839f-3b4b76863ab8-public-tls-certs\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.906772 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a0d597e-d509-41b7-839f-3b4b76863ab8-run-httpd\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.906806 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a0d597e-d509-41b7-839f-3b4b76863ab8-log-httpd\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.912585 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a0d597e-d509-41b7-839f-3b4b76863ab8-config-data\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.912985 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5a0d597e-d509-41b7-839f-3b4b76863ab8-etc-swift\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.915399 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a0d597e-d509-41b7-839f-3b4b76863ab8-internal-tls-certs\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.916944 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0d597e-d509-41b7-839f-3b4b76863ab8-combined-ca-bundle\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.917465 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a0d597e-d509-41b7-839f-3b4b76863ab8-public-tls-certs\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.928772 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7dvv\" (UniqueName: \"kubernetes.io/projected/5a0d597e-d509-41b7-839f-3b4b76863ab8-kube-api-access-t7dvv\") pod \"swift-proxy-c9f6bd49f-qt4wx\" (UID: \"5a0d597e-d509-41b7-839f-3b4b76863ab8\") " pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:33 crc kubenswrapper[4911]: I0929 21:42:33.995951 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:34 crc kubenswrapper[4911]: I0929 21:42:34.225693 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:34 crc kubenswrapper[4911]: I0929 21:42:34.226276 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="ceilometer-central-agent" containerID="cri-o://2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367" gracePeriod=30 Sep 29 21:42:34 crc kubenswrapper[4911]: I0929 21:42:34.226774 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="proxy-httpd" containerID="cri-o://23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12" gracePeriod=30 Sep 29 21:42:34 crc kubenswrapper[4911]: I0929 21:42:34.227212 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="sg-core" containerID="cri-o://41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e" gracePeriod=30 Sep 29 21:42:34 crc kubenswrapper[4911]: I0929 21:42:34.227259 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="ceilometer-notification-agent" containerID="cri-o://8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304" gracePeriod=30 Sep 29 21:42:34 crc kubenswrapper[4911]: I0929 21:42:34.372504 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:34 crc kubenswrapper[4911]: I0929 21:42:34.453102 4911 generic.go:334] "Generic (PLEG): container finished" podID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerID="23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12" exitCode=0 Sep 29 21:42:34 crc kubenswrapper[4911]: I0929 21:42:34.453136 4911 generic.go:334] "Generic (PLEG): container finished" podID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerID="41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e" exitCode=2 Sep 29 21:42:34 crc kubenswrapper[4911]: I0929 21:42:34.453157 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4","Type":"ContainerDied","Data":"23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12"} Sep 29 21:42:34 crc kubenswrapper[4911]: I0929 21:42:34.453180 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4","Type":"ContainerDied","Data":"41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e"} Sep 29 21:42:34 crc kubenswrapper[4911]: I0929 21:42:34.649811 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-c9f6bd49f-qt4wx"] Sep 29 21:42:34 crc kubenswrapper[4911]: W0929 21:42:34.677015 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a0d597e_d509_41b7_839f_3b4b76863ab8.slice/crio-282105da59b573c880967855449d86fc9bd01c163a72cc9e08b9a6bc102ba094 WatchSource:0}: Error finding container 282105da59b573c880967855449d86fc9bd01c163a72cc9e08b9a6bc102ba094: Status 404 returned error can't find the container with id 
282105da59b573c880967855449d86fc9bd01c163a72cc9e08b9a6bc102ba094 Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.169888 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.329042 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-combined-ca-bundle\") pod \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.329362 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-run-httpd\") pod \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.329450 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-sg-core-conf-yaml\") pod \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.329758 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-config-data\") pod \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.329808 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxp7f\" (UniqueName: \"kubernetes.io/projected/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-kube-api-access-dxp7f\") pod \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.329829 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-scripts\") pod \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.329889 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-log-httpd\") pod \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\" (UID: \"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4\") " Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.329946 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" (UID: "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.330418 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.330622 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" (UID: "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.333114 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-scripts" (OuterVolumeSpecName: "scripts") pod "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" (UID: "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.333332 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-kube-api-access-dxp7f" (OuterVolumeSpecName: "kube-api-access-dxp7f") pod "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" (UID: "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4"). InnerVolumeSpecName "kube-api-access-dxp7f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.353899 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" (UID: "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.432288 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxp7f\" (UniqueName: \"kubernetes.io/projected/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-kube-api-access-dxp7f\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.432327 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.432340 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.432352 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.433099 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" (UID: "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.443050 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-config-data" (OuterVolumeSpecName: "config-data") pod "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" (UID: "b07b47db-f8f0-4357-8bcd-4d3bef88f8d4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.464157 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-c9f6bd49f-qt4wx" event={"ID":"5a0d597e-d509-41b7-839f-3b4b76863ab8","Type":"ContainerStarted","Data":"319c6ba88d6fa349d61fa352b5a7dcdf0115f3b1922889b9f2f98d4362bc8f17"} Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.464218 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-c9f6bd49f-qt4wx" event={"ID":"5a0d597e-d509-41b7-839f-3b4b76863ab8","Type":"ContainerStarted","Data":"b2df2353d1f352414d9d7c8f45ef81ea0d417047025be780139bfd691aab2a1b"} Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.464244 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-c9f6bd49f-qt4wx" event={"ID":"5a0d597e-d509-41b7-839f-3b4b76863ab8","Type":"ContainerStarted","Data":"282105da59b573c880967855449d86fc9bd01c163a72cc9e08b9a6bc102ba094"} Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.464295 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.469589 4911 generic.go:334] "Generic (PLEG): container finished" podID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerID="8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304" exitCode=0 Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.469623 4911 generic.go:334] "Generic (PLEG): container finished" podID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerID="2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367" exitCode=0 Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.469665 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4","Type":"ContainerDied","Data":"8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304"} Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.469690 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4","Type":"ContainerDied","Data":"2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367"} Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.469701 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b07b47db-f8f0-4357-8bcd-4d3bef88f8d4","Type":"ContainerDied","Data":"2eed44f67217e187e8f570440b9ac579d6cdcb39fb319e787e4bc432681e9b9c"} Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.469716 4911 scope.go:117] "RemoveContainer" containerID="23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.469880 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.472684 4911 generic.go:334] "Generic (PLEG): container finished" podID="3b39a884-4bfd-4927-af16-6ce025d131fc" containerID="64d76e2842031af1c9210cb8228a1e805574730be2e219ab06de0ff0343b246d" exitCode=0 Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.472725 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-m4r9b" event={"ID":"3b39a884-4bfd-4927-af16-6ce025d131fc","Type":"ContainerDied","Data":"64d76e2842031af1c9210cb8228a1e805574730be2e219ab06de0ff0343b246d"} Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.493727 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-c9f6bd49f-qt4wx" podStartSLOduration=2.493706661 podStartE2EDuration="2.493706661s" podCreationTimestamp="2025-09-29 21:42:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:42:35.488179859 +0000 UTC m=+1033.465292550" watchObservedRunningTime="2025-09-29 21:42:35.493706661 +0000 UTC m=+1033.470819342" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.534547 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.534930 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.582104 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.592220 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.600872 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:35 crc kubenswrapper[4911]: E0929 21:42:35.601344 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="ceilometer-central-agent" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.601369 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="ceilometer-central-agent" Sep 29 21:42:35 crc kubenswrapper[4911]: E0929 21:42:35.601397 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="sg-core" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.601406 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="sg-core" Sep 29 21:42:35 crc kubenswrapper[4911]: E0929 21:42:35.601444 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="ceilometer-notification-agent" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.601453 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="ceilometer-notification-agent" Sep 29 21:42:35 crc kubenswrapper[4911]: E0929 21:42:35.601468 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" 
containerName="proxy-httpd" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.601473 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="proxy-httpd" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.601647 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="proxy-httpd" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.601656 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="sg-core" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.601680 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="ceilometer-central-agent" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.601690 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" containerName="ceilometer-notification-agent" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.603735 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.607291 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.607738 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.614362 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.738410 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/615c8a3f-a4f7-4017-aa61-d2525148b6cd-log-httpd\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.738462 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.738486 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-config-data\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.738511 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcsls\" (UniqueName: \"kubernetes.io/projected/615c8a3f-a4f7-4017-aa61-d2525148b6cd-kube-api-access-zcsls\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.738582 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-scripts\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 
21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.738623 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.738637 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/615c8a3f-a4f7-4017-aa61-d2525148b6cd-run-httpd\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.839954 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.839995 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/615c8a3f-a4f7-4017-aa61-d2525148b6cd-run-httpd\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.840085 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/615c8a3f-a4f7-4017-aa61-d2525148b6cd-log-httpd\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.840118 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.840141 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-config-data\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.840160 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcsls\" (UniqueName: \"kubernetes.io/projected/615c8a3f-a4f7-4017-aa61-d2525148b6cd-kube-api-access-zcsls\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.840200 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-scripts\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.841065 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/615c8a3f-a4f7-4017-aa61-d2525148b6cd-log-httpd\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" 
Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.841663 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/615c8a3f-a4f7-4017-aa61-d2525148b6cd-run-httpd\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.844078 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-scripts\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.844670 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.845602 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-config-data\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.848645 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.858181 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcsls\" (UniqueName: \"kubernetes.io/projected/615c8a3f-a4f7-4017-aa61-d2525148b6cd-kube-api-access-zcsls\") pod \"ceilometer-0\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " pod="openstack/ceilometer-0" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.921328 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:35 crc kubenswrapper[4911]: I0929 21:42:35.927201 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:42:36 crc kubenswrapper[4911]: I0929 21:42:36.492945 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:36 crc kubenswrapper[4911]: I0929 21:42:36.729387 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b07b47db-f8f0-4357-8bcd-4d3bef88f8d4" path="/var/lib/kubelet/pods/b07b47db-f8f0-4357-8bcd-4d3bef88f8d4/volumes" Sep 29 21:42:37 crc kubenswrapper[4911]: I0929 21:42:37.172029 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:37 crc kubenswrapper[4911]: I0929 21:42:37.235513 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-pwvb6"] Sep 29 21:42:37 crc kubenswrapper[4911]: I0929 21:42:37.235773 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6" podUID="7dee5647-4d60-4283-b5a0-79cc059d340f" containerName="dnsmasq-dns" containerID="cri-o://b46ef6c12a3a0cdc0f13eebd9f70a13e6d26a43aa068cbbed851462d693e8578" gracePeriod=10 Sep 29 21:42:37 crc kubenswrapper[4911]: I0929 21:42:37.523901 4911 generic.go:334] "Generic (PLEG): container finished" podID="7dee5647-4d60-4283-b5a0-79cc059d340f" containerID="b46ef6c12a3a0cdc0f13eebd9f70a13e6d26a43aa068cbbed851462d693e8578" exitCode=0 Sep 29 21:42:37 crc kubenswrapper[4911]: I0929 21:42:37.524248 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6" event={"ID":"7dee5647-4d60-4283-b5a0-79cc059d340f","Type":"ContainerDied","Data":"b46ef6c12a3a0cdc0f13eebd9f70a13e6d26a43aa068cbbed851462d693e8578"} Sep 29 21:42:39 crc kubenswrapper[4911]: I0929 21:42:39.330763 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6" podUID="7dee5647-4d60-4283-b5a0-79cc059d340f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.139:5353: connect: connection refused" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.510040 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.540380 4911 scope.go:117] "RemoveContainer" containerID="41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.571387 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-m4r9b" event={"ID":"3b39a884-4bfd-4927-af16-6ce025d131fc","Type":"ContainerDied","Data":"05f8d100d16ebda6b16163bd410a9a45ca832799f040959f4736c3d46147db4a"} Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.571638 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05f8d100d16ebda6b16163bd410a9a45ca832799f040959f4736c3d46147db4a" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.591705 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7b8dc76f9b-7vhr7" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.692444 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-79b7f696f8-zxrwh"] Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.693125 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-79b7f696f8-zxrwh" podUID="40533d70-c10c-4b0e-b870-f50e423ccdc5" 
containerName="barbican-api-log" containerID="cri-o://c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44" gracePeriod=30 Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.694650 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-79b7f696f8-zxrwh" podUID="40533d70-c10c-4b0e-b870-f50e423ccdc5" containerName="barbican-api" containerID="cri-o://d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e" gracePeriod=30 Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.753674 4911 scope.go:117] "RemoveContainer" containerID="8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.778901 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.885969 4911 scope.go:117] "RemoveContainer" containerID="2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.906912 4911 scope.go:117] "RemoveContainer" containerID="23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12" Sep 29 21:42:41 crc kubenswrapper[4911]: E0929 21:42:41.908515 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12\": container with ID starting with 23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12 not found: ID does not exist" containerID="23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.908591 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12"} err="failed to get container status \"23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12\": rpc error: code = NotFound desc = could not find container \"23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12\": container with ID starting with 23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12 not found: ID does not exist" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.908624 4911 scope.go:117] "RemoveContainer" containerID="41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e" Sep 29 21:42:41 crc kubenswrapper[4911]: E0929 21:42:41.909999 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e\": container with ID starting with 41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e not found: ID does not exist" containerID="41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.910056 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e"} err="failed to get container status \"41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e\": rpc error: code = NotFound desc = could not find container \"41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e\": container with ID starting with 41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e not found: ID does not exist" Sep 29 21:42:41 crc kubenswrapper[4911]: 
I0929 21:42:41.910087 4911 scope.go:117] "RemoveContainer" containerID="8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304" Sep 29 21:42:41 crc kubenswrapper[4911]: E0929 21:42:41.910473 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304\": container with ID starting with 8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304 not found: ID does not exist" containerID="8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.911108 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304"} err="failed to get container status \"8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304\": rpc error: code = NotFound desc = could not find container \"8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304\": container with ID starting with 8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304 not found: ID does not exist" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.911133 4911 scope.go:117] "RemoveContainer" containerID="2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367" Sep 29 21:42:41 crc kubenswrapper[4911]: E0929 21:42:41.911499 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367\": container with ID starting with 2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367 not found: ID does not exist" containerID="2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.911530 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367"} err="failed to get container status \"2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367\": rpc error: code = NotFound desc = could not find container \"2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367\": container with ID starting with 2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367 not found: ID does not exist" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.911552 4911 scope.go:117] "RemoveContainer" containerID="23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.911516 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.913091 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12"} err="failed to get container status \"23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12\": rpc error: code = NotFound desc = could not find container \"23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12\": container with ID starting with 23fbd4c167971799adbe00d529a6565810abfa781e566dbf6dd6f75673928a12 not found: ID does not exist" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.913120 4911 scope.go:117] "RemoveContainer" containerID="41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.913420 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e"} err="failed to get container status \"41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e\": rpc error: code = NotFound desc = could not find container \"41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e\": container with ID starting with 41e8ec04a9b98666109628dbbf6818a83306ab02707f9c132c952dc52f25c03e not found: ID does not exist" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.913436 4911 scope.go:117] "RemoveContainer" containerID="8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.913639 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304"} err="failed to get container status \"8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304\": rpc error: code = NotFound desc = could not find container \"8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304\": container with ID starting with 8fa39b93171fc756c0a52e678b164b1c533b7e74bc5c7a1f12b1e9088acbf304 not found: ID does not exist" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.913659 4911 scope.go:117] "RemoveContainer" containerID="2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.913840 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367"} err="failed to get container status \"2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367\": rpc error: code = NotFound desc = could not find container \"2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367\": container with ID starting with 2614ee3255dbe426372dbbffdeb85a2af5751103c893a0cbca05d0767133e367 not found: ID does not exist" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.958101 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86prj\" (UniqueName: \"kubernetes.io/projected/7dee5647-4d60-4283-b5a0-79cc059d340f-kube-api-access-86prj\") pod \"7dee5647-4d60-4283-b5a0-79cc059d340f\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.958168 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-scripts\") pod \"3b39a884-4bfd-4927-af16-6ce025d131fc\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.958255 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-dns-svc\") pod \"7dee5647-4d60-4283-b5a0-79cc059d340f\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.958293 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-config-data\") pod \"3b39a884-4bfd-4927-af16-6ce025d131fc\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.958320 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-combined-ca-bundle\") pod \"3b39a884-4bfd-4927-af16-6ce025d131fc\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.958348 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b39a884-4bfd-4927-af16-6ce025d131fc-etc-machine-id\") pod \"3b39a884-4bfd-4927-af16-6ce025d131fc\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.958408 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ptsx\" (UniqueName: \"kubernetes.io/projected/3b39a884-4bfd-4927-af16-6ce025d131fc-kube-api-access-4ptsx\") pod \"3b39a884-4bfd-4927-af16-6ce025d131fc\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.958449 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-dns-swift-storage-0\") pod \"7dee5647-4d60-4283-b5a0-79cc059d340f\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.958553 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-db-sync-config-data\") pod \"3b39a884-4bfd-4927-af16-6ce025d131fc\" (UID: \"3b39a884-4bfd-4927-af16-6ce025d131fc\") " Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.958588 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-config\") pod \"7dee5647-4d60-4283-b5a0-79cc059d340f\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.958618 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-ovsdbserver-nb\") pod \"7dee5647-4d60-4283-b5a0-79cc059d340f\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.958678 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-ovsdbserver-sb\") pod \"7dee5647-4d60-4283-b5a0-79cc059d340f\" (UID: \"7dee5647-4d60-4283-b5a0-79cc059d340f\") " Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.973942 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3b39a884-4bfd-4927-af16-6ce025d131fc-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "3b39a884-4bfd-4927-af16-6ce025d131fc" (UID: "3b39a884-4bfd-4927-af16-6ce025d131fc"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.980502 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "3b39a884-4bfd-4927-af16-6ce025d131fc" (UID: "3b39a884-4bfd-4927-af16-6ce025d131fc"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.983769 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-scripts" (OuterVolumeSpecName: "scripts") pod "3b39a884-4bfd-4927-af16-6ce025d131fc" (UID: "3b39a884-4bfd-4927-af16-6ce025d131fc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.983917 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dee5647-4d60-4283-b5a0-79cc059d340f-kube-api-access-86prj" (OuterVolumeSpecName: "kube-api-access-86prj") pod "7dee5647-4d60-4283-b5a0-79cc059d340f" (UID: "7dee5647-4d60-4283-b5a0-79cc059d340f"). InnerVolumeSpecName "kube-api-access-86prj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:41 crc kubenswrapper[4911]: I0929 21:42:41.989585 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b39a884-4bfd-4927-af16-6ce025d131fc-kube-api-access-4ptsx" (OuterVolumeSpecName: "kube-api-access-4ptsx") pod "3b39a884-4bfd-4927-af16-6ce025d131fc" (UID: "3b39a884-4bfd-4927-af16-6ce025d131fc"). InnerVolumeSpecName "kube-api-access-4ptsx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.067672 4911 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.067704 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86prj\" (UniqueName: \"kubernetes.io/projected/7dee5647-4d60-4283-b5a0-79cc059d340f-kube-api-access-86prj\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.067714 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.067723 4911 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b39a884-4bfd-4927-af16-6ce025d131fc-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.067732 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ptsx\" (UniqueName: \"kubernetes.io/projected/3b39a884-4bfd-4927-af16-6ce025d131fc-kube-api-access-4ptsx\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.132152 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.198017 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b39a884-4bfd-4927-af16-6ce025d131fc" (UID: "3b39a884-4bfd-4927-af16-6ce025d131fc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.228638 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7dee5647-4d60-4283-b5a0-79cc059d340f" (UID: "7dee5647-4d60-4283-b5a0-79cc059d340f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.233408 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-config-data" (OuterVolumeSpecName: "config-data") pod "3b39a884-4bfd-4927-af16-6ce025d131fc" (UID: "3b39a884-4bfd-4927-af16-6ce025d131fc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.236801 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7dee5647-4d60-4283-b5a0-79cc059d340f" (UID: "7dee5647-4d60-4283-b5a0-79cc059d340f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.242236 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-config" (OuterVolumeSpecName: "config") pod "7dee5647-4d60-4283-b5a0-79cc059d340f" (UID: "7dee5647-4d60-4283-b5a0-79cc059d340f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.247052 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7dee5647-4d60-4283-b5a0-79cc059d340f" (UID: "7dee5647-4d60-4283-b5a0-79cc059d340f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.248647 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7dee5647-4d60-4283-b5a0-79cc059d340f" (UID: "7dee5647-4d60-4283-b5a0-79cc059d340f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.286893 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.287203 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.287214 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b39a884-4bfd-4927-af16-6ce025d131fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.287225 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.287234 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.287265 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.287274 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7dee5647-4d60-4283-b5a0-79cc059d340f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.581233 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"3ca83d01-449e-43fd-a6e3-3a1da30ec45b","Type":"ContainerStarted","Data":"b47cd2d9ff0b60c1c95ee0842cd663da1ac8e6b70d8699d181e43e56b3d09280"} Sep 29 21:42:42 crc kubenswrapper[4911]: 
I0929 21:42:42.585570 4911 generic.go:334] "Generic (PLEG): container finished" podID="40533d70-c10c-4b0e-b870-f50e423ccdc5" containerID="c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44" exitCode=143 Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.585639 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-79b7f696f8-zxrwh" event={"ID":"40533d70-c10c-4b0e-b870-f50e423ccdc5","Type":"ContainerDied","Data":"c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44"} Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.586974 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"615c8a3f-a4f7-4017-aa61-d2525148b6cd","Type":"ContainerStarted","Data":"9606cbb1f625e7ad8f80a17e1969446b520129278a1972046a109602c3e22b59"} Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.589055 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6" event={"ID":"7dee5647-4d60-4283-b5a0-79cc059d340f","Type":"ContainerDied","Data":"d20dfecb76cfdc4570f11edac7ba83ef2d6c3e8903caf431ccc01a474f05d78b"} Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.589093 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-pwvb6" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.589093 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-m4r9b" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.589094 4911 scope.go:117] "RemoveContainer" containerID="b46ef6c12a3a0cdc0f13eebd9f70a13e6d26a43aa068cbbed851462d693e8578" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.608596 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.046790565 podStartE2EDuration="14.608565916s" podCreationTimestamp="2025-09-29 21:42:28 +0000 UTC" firstStartedPulling="2025-09-29 21:42:30.102162266 +0000 UTC m=+1028.079274927" lastFinishedPulling="2025-09-29 21:42:41.663937607 +0000 UTC m=+1039.641050278" observedRunningTime="2025-09-29 21:42:42.600842326 +0000 UTC m=+1040.577955007" watchObservedRunningTime="2025-09-29 21:42:42.608565916 +0000 UTC m=+1040.585678597" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.627872 4911 scope.go:117] "RemoveContainer" containerID="de4e20c9f2c95db0bc8fb0100ef822644022641040189e980b231ffd7f12ce26" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.669285 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-pwvb6"] Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.689181 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-pwvb6"] Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.718265 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dee5647-4d60-4283-b5a0-79cc059d340f" path="/var/lib/kubelet/pods/7dee5647-4d60-4283-b5a0-79cc059d340f/volumes" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.942431 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.990896 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 21:42:42 crc kubenswrapper[4911]: E0929 21:42:42.991327 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b39a884-4bfd-4927-af16-6ce025d131fc" 
containerName="cinder-db-sync" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.991342 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b39a884-4bfd-4927-af16-6ce025d131fc" containerName="cinder-db-sync" Sep 29 21:42:42 crc kubenswrapper[4911]: E0929 21:42:42.991361 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dee5647-4d60-4283-b5a0-79cc059d340f" containerName="dnsmasq-dns" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.991367 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dee5647-4d60-4283-b5a0-79cc059d340f" containerName="dnsmasq-dns" Sep 29 21:42:42 crc kubenswrapper[4911]: E0929 21:42:42.991374 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dee5647-4d60-4283-b5a0-79cc059d340f" containerName="init" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.991380 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dee5647-4d60-4283-b5a0-79cc059d340f" containerName="init" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.991561 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dee5647-4d60-4283-b5a0-79cc059d340f" containerName="dnsmasq-dns" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.991583 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b39a884-4bfd-4927-af16-6ce025d131fc" containerName="cinder-db-sync" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.992525 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.996947 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 29 21:42:42 crc kubenswrapper[4911]: I0929 21:42:42.997162 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-sh6vk" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.004204 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.009093 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.009399 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.088727 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-q9jx9"] Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.097453 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.112953 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-q9jx9"] Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.125472 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.125530 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f8ed0335-40f9-4ef6-be8e-8866dce897f4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.125591 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-scripts\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.125636 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv8wz\" (UniqueName: \"kubernetes.io/projected/f8ed0335-40f9-4ef6-be8e-8866dce897f4-kube-api-access-zv8wz\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.125667 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.125686 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-config-data\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.228574 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-scripts\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.228692 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv8wz\" (UniqueName: \"kubernetes.io/projected/f8ed0335-40f9-4ef6-be8e-8866dce897f4-kube-api-access-zv8wz\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.228755 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.228780 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-config-data\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.228844 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff4vd\" (UniqueName: \"kubernetes.io/projected/fcbf9287-d564-45b2-b6be-4cbed087355f-kube-api-access-ff4vd\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.228871 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.228890 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.228923 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.228948 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-config\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.228974 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f8ed0335-40f9-4ef6-be8e-8866dce897f4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.228994 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.229882 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.230182 
4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-dns-svc\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.230293 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f8ed0335-40f9-4ef6-be8e-8866dce897f4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.232656 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.238214 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-config-data\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.241721 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.243341 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-scripts\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.259818 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.259927 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.262140 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.273259 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv8wz\" (UniqueName: \"kubernetes.io/projected/f8ed0335-40f9-4ef6-be8e-8866dce897f4-kube-api-access-zv8wz\") pod \"cinder-scheduler-0\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.310494 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.331685 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72f17fb5-75ee-454c-960c-a5e8471f4113-logs\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.332021 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-config-data-custom\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.332040 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-config-data\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.332061 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/72f17fb5-75ee-454c-960c-a5e8471f4113-etc-machine-id\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.332096 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.332131 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff4vd\" (UniqueName: \"kubernetes.io/projected/fcbf9287-d564-45b2-b6be-4cbed087355f-kube-api-access-ff4vd\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.332150 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.332175 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.332195 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-config\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.332219 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.332261 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-scripts\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.332278 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scrkb\" (UniqueName: \"kubernetes.io/projected/72f17fb5-75ee-454c-960c-a5e8471f4113-kube-api-access-scrkb\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.332314 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-dns-svc\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.335667 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-dns-svc\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.338085 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.338222 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.338468 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-config\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.338914 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.355800 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff4vd\" (UniqueName: 
\"kubernetes.io/projected/fcbf9287-d564-45b2-b6be-4cbed087355f-kube-api-access-ff4vd\") pod \"dnsmasq-dns-5784cf869f-q9jx9\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") " pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.429260 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.433997 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72f17fb5-75ee-454c-960c-a5e8471f4113-logs\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.434042 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-config-data-custom\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.434062 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-config-data\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.434086 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/72f17fb5-75ee-454c-960c-a5e8471f4113-etc-machine-id\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.434118 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.434166 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-scripts\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.434180 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scrkb\" (UniqueName: \"kubernetes.io/projected/72f17fb5-75ee-454c-960c-a5e8471f4113-kube-api-access-scrkb\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.434817 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72f17fb5-75ee-454c-960c-a5e8471f4113-logs\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.435199 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/72f17fb5-75ee-454c-960c-a5e8471f4113-etc-machine-id\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " 
pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.439868 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-config-data\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.441291 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-config-data-custom\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.444237 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-scripts\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.444367 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.459297 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scrkb\" (UniqueName: \"kubernetes.io/projected/72f17fb5-75ee-454c-960c-a5e8471f4113-kube-api-access-scrkb\") pod \"cinder-api-0\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.615265 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.686515 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"615c8a3f-a4f7-4017-aa61-d2525148b6cd","Type":"ContainerStarted","Data":"80e3f75c12263b8d6f8808be94f12fae883645d6ede4499e95330531ed7aa9f2"} Sep 29 21:42:43 crc kubenswrapper[4911]: I0929 21:42:43.965155 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 21:42:44.003912 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 21:42:44.005300 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-c9f6bd49f-qt4wx" Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 21:42:44.045708 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-q9jx9"] Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 21:42:44.201691 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 21:42:44 crc kubenswrapper[4911]: W0929 21:42:44.212916 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72f17fb5_75ee_454c_960c_a5e8471f4113.slice/crio-d05ae18c8fbee53a494b76ed3979ddadd0d9931543944db95ef605afb977b708 WatchSource:0}: Error finding container d05ae18c8fbee53a494b76ed3979ddadd0d9931543944db95ef605afb977b708: Status 404 returned error can't find the container with id d05ae18c8fbee53a494b76ed3979ddadd0d9931543944db95ef605afb977b708 Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 21:42:44.736559 4911 generic.go:334] "Generic (PLEG): container finished" podID="fcbf9287-d564-45b2-b6be-4cbed087355f" containerID="7e1c7dd51da3b0b8db72b0f1271c24aebd4d6fcc8294ab0005c70f59c750e0b5" exitCode=0 Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 21:42:44.750885 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" event={"ID":"fcbf9287-d564-45b2-b6be-4cbed087355f","Type":"ContainerDied","Data":"7e1c7dd51da3b0b8db72b0f1271c24aebd4d6fcc8294ab0005c70f59c750e0b5"} Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 21:42:44.750927 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" event={"ID":"fcbf9287-d564-45b2-b6be-4cbed087355f","Type":"ContainerStarted","Data":"e05ebfc79a3be6f3a75cc3dd135da6869396f28bf41b18e282c51368c99e0f48"} Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 21:42:44.750938 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f8ed0335-40f9-4ef6-be8e-8866dce897f4","Type":"ContainerStarted","Data":"9dcd36572dc071d0f9042d4c2390ca3741f65b5cda38c9ee1ce5ef44699a26ce"} Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 21:42:44.754939 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"615c8a3f-a4f7-4017-aa61-d2525148b6cd","Type":"ContainerStarted","Data":"ebda6268c12c13b5e1ea97f94d525e908bbc59a59f3205fbdce39c700ec2a200"} Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 21:42:44.754982 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"615c8a3f-a4f7-4017-aa61-d2525148b6cd","Type":"ContainerStarted","Data":"5a7ab390c712338daefc738e9555f95982177e9224b524bb4ad2fc68ab4b8236"} Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 
21:42:44.762667 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"72f17fb5-75ee-454c-960c-a5e8471f4113","Type":"ContainerStarted","Data":"d05ae18c8fbee53a494b76ed3979ddadd0d9931543944db95ef605afb977b708"} Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 21:42:44.896969 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-79b7f696f8-zxrwh" podUID="40533d70-c10c-4b0e-b870-f50e423ccdc5" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:42846->10.217.0.161:9311: read: connection reset by peer" Sep 29 21:42:44 crc kubenswrapper[4911]: I0929 21:42:44.897443 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-79b7f696f8-zxrwh" podUID="40533d70-c10c-4b0e-b870-f50e423ccdc5" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:42834->10.217.0.161:9311: read: connection reset by peer" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.189249 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.289939 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.388281 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-config-data-custom\") pod \"40533d70-c10c-4b0e-b870-f50e423ccdc5\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.388341 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-combined-ca-bundle\") pod \"40533d70-c10c-4b0e-b870-f50e423ccdc5\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.388455 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-config-data\") pod \"40533d70-c10c-4b0e-b870-f50e423ccdc5\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.388508 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flp5w\" (UniqueName: \"kubernetes.io/projected/40533d70-c10c-4b0e-b870-f50e423ccdc5-kube-api-access-flp5w\") pod \"40533d70-c10c-4b0e-b870-f50e423ccdc5\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.388537 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40533d70-c10c-4b0e-b870-f50e423ccdc5-logs\") pod \"40533d70-c10c-4b0e-b870-f50e423ccdc5\" (UID: \"40533d70-c10c-4b0e-b870-f50e423ccdc5\") " Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.389552 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40533d70-c10c-4b0e-b870-f50e423ccdc5-logs" (OuterVolumeSpecName: "logs") pod "40533d70-c10c-4b0e-b870-f50e423ccdc5" (UID: "40533d70-c10c-4b0e-b870-f50e423ccdc5"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.390148 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40533d70-c10c-4b0e-b870-f50e423ccdc5-logs\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.401101 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40533d70-c10c-4b0e-b870-f50e423ccdc5-kube-api-access-flp5w" (OuterVolumeSpecName: "kube-api-access-flp5w") pod "40533d70-c10c-4b0e-b870-f50e423ccdc5" (UID: "40533d70-c10c-4b0e-b870-f50e423ccdc5"). InnerVolumeSpecName "kube-api-access-flp5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.402152 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "40533d70-c10c-4b0e-b870-f50e423ccdc5" (UID: "40533d70-c10c-4b0e-b870-f50e423ccdc5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.442217 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-config-data" (OuterVolumeSpecName: "config-data") pod "40533d70-c10c-4b0e-b870-f50e423ccdc5" (UID: "40533d70-c10c-4b0e-b870-f50e423ccdc5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.457573 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40533d70-c10c-4b0e-b870-f50e423ccdc5" (UID: "40533d70-c10c-4b0e-b870-f50e423ccdc5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.492322 4911 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.492358 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.492368 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40533d70-c10c-4b0e-b870-f50e423ccdc5-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.492377 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flp5w\" (UniqueName: \"kubernetes.io/projected/40533d70-c10c-4b0e-b870-f50e423ccdc5-kube-api-access-flp5w\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.773309 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f8ed0335-40f9-4ef6-be8e-8866dce897f4","Type":"ContainerStarted","Data":"8e1bfc56c83547c9483aa11834ae24710ebef24a02162a656011acf68c69df9e"} Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.777533 4911 generic.go:334] "Generic (PLEG): container finished" podID="40533d70-c10c-4b0e-b870-f50e423ccdc5" containerID="d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e" exitCode=0 Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.777581 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-79b7f696f8-zxrwh" event={"ID":"40533d70-c10c-4b0e-b870-f50e423ccdc5","Type":"ContainerDied","Data":"d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e"} Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.777601 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-79b7f696f8-zxrwh" event={"ID":"40533d70-c10c-4b0e-b870-f50e423ccdc5","Type":"ContainerDied","Data":"4d0e8f269b8b41e38396156d7783ae0ed622f527967ee7e497ec59fab11ec871"} Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.777618 4911 scope.go:117] "RemoveContainer" containerID="d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.777686 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-79b7f696f8-zxrwh" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.786223 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"72f17fb5-75ee-454c-960c-a5e8471f4113","Type":"ContainerStarted","Data":"ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2"} Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.789699 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" event={"ID":"fcbf9287-d564-45b2-b6be-4cbed087355f","Type":"ContainerStarted","Data":"0f60db69caaf7760661f4272d40eedbed2ff2060dbe12302fec34a8b09c26d13"} Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.789868 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.814486 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" podStartSLOduration=2.81446656 podStartE2EDuration="2.81446656s" podCreationTimestamp="2025-09-29 21:42:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:42:45.812877431 +0000 UTC m=+1043.789990202" watchObservedRunningTime="2025-09-29 21:42:45.81446656 +0000 UTC m=+1043.791579231" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.816890 4911 scope.go:117] "RemoveContainer" containerID="c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.829933 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-79b7f696f8-zxrwh"] Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.837074 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-79b7f696f8-zxrwh"] Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.861949 4911 scope.go:117] "RemoveContainer" containerID="d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e" Sep 29 21:42:45 crc kubenswrapper[4911]: E0929 21:42:45.864763 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e\": container with ID starting with d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e not found: ID does not exist" containerID="d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.864820 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e"} err="failed to get container status \"d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e\": rpc error: code = NotFound desc = could not find container \"d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e\": container with ID starting with d9a05df345f8f6c2fb47fc2effbfb55613e3e6d2fb36ce8168c9188bc9bacb3e not found: ID does not exist" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.864847 4911 scope.go:117] "RemoveContainer" containerID="c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44" Sep 29 21:42:45 crc kubenswrapper[4911]: E0929 21:42:45.868128 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44\": container with ID starting with c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44 not found: ID does not exist" containerID="c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44" Sep 29 21:42:45 crc kubenswrapper[4911]: I0929 21:42:45.868166 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44"} err="failed to get container status \"c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44\": rpc error: code = NotFound desc = could not find container \"c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44\": container with ID starting with c74c12f3e3fbd37473edcf5cbd3e6907e4c97317591e7796d4c7d9e742e6cb44 not found: ID does not exist" Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.711000 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40533d70-c10c-4b0e-b870-f50e423ccdc5" path="/var/lib/kubelet/pods/40533d70-c10c-4b0e-b870-f50e423ccdc5/volumes" Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.739535 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.805552 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"72f17fb5-75ee-454c-960c-a5e8471f4113","Type":"ContainerStarted","Data":"7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae"} Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.805675 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="72f17fb5-75ee-454c-960c-a5e8471f4113" containerName="cinder-api-log" containerID="cri-o://ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2" gracePeriod=30 Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.805768 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="72f17fb5-75ee-454c-960c-a5e8471f4113" containerName="cinder-api" containerID="cri-o://7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae" gracePeriod=30 Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.806408 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.810628 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f8ed0335-40f9-4ef6-be8e-8866dce897f4","Type":"ContainerStarted","Data":"f6d9871f360fecc11a527fe734d5f7290dfa4b99ea07897033b8b88ea84086f9"} Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.831765 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="ceilometer-central-agent" containerID="cri-o://80e3f75c12263b8d6f8808be94f12fae883645d6ede4499e95330531ed7aa9f2" gracePeriod=30 Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.831957 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="proxy-httpd" containerID="cri-o://38de83a8b4c0849e5fc22c775d98273066afb0cfda68f4019908328cd6ca6e4c" gracePeriod=30 Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.831996 4911 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/ceilometer-0" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="sg-core" containerID="cri-o://ebda6268c12c13b5e1ea97f94d525e908bbc59a59f3205fbdce39c700ec2a200" gracePeriod=30 Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.832028 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="ceilometer-notification-agent" containerID="cri-o://5a7ab390c712338daefc738e9555f95982177e9224b524bb4ad2fc68ab4b8236" gracePeriod=30 Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.832168 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"615c8a3f-a4f7-4017-aa61-d2525148b6cd","Type":"ContainerStarted","Data":"38de83a8b4c0849e5fc22c775d98273066afb0cfda68f4019908328cd6ca6e4c"} Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.832190 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.838578 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.8385609240000003 podStartE2EDuration="3.838560924s" podCreationTimestamp="2025-09-29 21:42:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:42:46.831402072 +0000 UTC m=+1044.808514743" watchObservedRunningTime="2025-09-29 21:42:46.838560924 +0000 UTC m=+1044.815673595" Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.863144 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.115800376 podStartE2EDuration="4.863128655s" podCreationTimestamp="2025-09-29 21:42:42 +0000 UTC" firstStartedPulling="2025-09-29 21:42:43.967060891 +0000 UTC m=+1041.944173562" lastFinishedPulling="2025-09-29 21:42:44.71438917 +0000 UTC m=+1042.691501841" observedRunningTime="2025-09-29 21:42:46.857347016 +0000 UTC m=+1044.834459687" watchObservedRunningTime="2025-09-29 21:42:46.863128655 +0000 UTC m=+1044.840241336" Sep 29 21:42:46 crc kubenswrapper[4911]: I0929 21:42:46.884159 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=8.145351206 podStartE2EDuration="11.884145538s" podCreationTimestamp="2025-09-29 21:42:35 +0000 UTC" firstStartedPulling="2025-09-29 21:42:42.14407958 +0000 UTC m=+1040.121192241" lastFinishedPulling="2025-09-29 21:42:45.882873902 +0000 UTC m=+1043.859986573" observedRunningTime="2025-09-29 21:42:46.880098222 +0000 UTC m=+1044.857210893" watchObservedRunningTime="2025-09-29 21:42:46.884145538 +0000 UTC m=+1044.861258209" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.391919 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.427328 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-scripts\") pod \"72f17fb5-75ee-454c-960c-a5e8471f4113\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.427369 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-combined-ca-bundle\") pod \"72f17fb5-75ee-454c-960c-a5e8471f4113\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.427399 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/72f17fb5-75ee-454c-960c-a5e8471f4113-etc-machine-id\") pod \"72f17fb5-75ee-454c-960c-a5e8471f4113\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.427468 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72f17fb5-75ee-454c-960c-a5e8471f4113-logs\") pod \"72f17fb5-75ee-454c-960c-a5e8471f4113\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.427516 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-config-data\") pod \"72f17fb5-75ee-454c-960c-a5e8471f4113\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.427543 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/72f17fb5-75ee-454c-960c-a5e8471f4113-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "72f17fb5-75ee-454c-960c-a5e8471f4113" (UID: "72f17fb5-75ee-454c-960c-a5e8471f4113"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.427654 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-config-data-custom\") pod \"72f17fb5-75ee-454c-960c-a5e8471f4113\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.427698 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scrkb\" (UniqueName: \"kubernetes.io/projected/72f17fb5-75ee-454c-960c-a5e8471f4113-kube-api-access-scrkb\") pod \"72f17fb5-75ee-454c-960c-a5e8471f4113\" (UID: \"72f17fb5-75ee-454c-960c-a5e8471f4113\") " Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.428087 4911 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/72f17fb5-75ee-454c-960c-a5e8471f4113-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.428300 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72f17fb5-75ee-454c-960c-a5e8471f4113-logs" (OuterVolumeSpecName: "logs") pod "72f17fb5-75ee-454c-960c-a5e8471f4113" (UID: "72f17fb5-75ee-454c-960c-a5e8471f4113"). 
InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.437045 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72f17fb5-75ee-454c-960c-a5e8471f4113-kube-api-access-scrkb" (OuterVolumeSpecName: "kube-api-access-scrkb") pod "72f17fb5-75ee-454c-960c-a5e8471f4113" (UID: "72f17fb5-75ee-454c-960c-a5e8471f4113"). InnerVolumeSpecName "kube-api-access-scrkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.437295 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-scripts" (OuterVolumeSpecName: "scripts") pod "72f17fb5-75ee-454c-960c-a5e8471f4113" (UID: "72f17fb5-75ee-454c-960c-a5e8471f4113"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.440727 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "72f17fb5-75ee-454c-960c-a5e8471f4113" (UID: "72f17fb5-75ee-454c-960c-a5e8471f4113"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.462015 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72f17fb5-75ee-454c-960c-a5e8471f4113" (UID: "72f17fb5-75ee-454c-960c-a5e8471f4113"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.486976 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-config-data" (OuterVolumeSpecName: "config-data") pod "72f17fb5-75ee-454c-960c-a5e8471f4113" (UID: "72f17fb5-75ee-454c-960c-a5e8471f4113"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.529939 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.529977 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.529990 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72f17fb5-75ee-454c-960c-a5e8471f4113-logs\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.530005 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.530019 4911 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72f17fb5-75ee-454c-960c-a5e8471f4113-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.530031 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scrkb\" (UniqueName: \"kubernetes.io/projected/72f17fb5-75ee-454c-960c-a5e8471f4113-kube-api-access-scrkb\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.840240 4911 generic.go:334] "Generic (PLEG): container finished" podID="72f17fb5-75ee-454c-960c-a5e8471f4113" containerID="7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae" exitCode=0 Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.840485 4911 generic.go:334] "Generic (PLEG): container finished" podID="72f17fb5-75ee-454c-960c-a5e8471f4113" containerID="ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2" exitCode=143 Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.840294 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.840310 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"72f17fb5-75ee-454c-960c-a5e8471f4113","Type":"ContainerDied","Data":"7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae"} Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.840549 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"72f17fb5-75ee-454c-960c-a5e8471f4113","Type":"ContainerDied","Data":"ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2"} Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.840561 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"72f17fb5-75ee-454c-960c-a5e8471f4113","Type":"ContainerDied","Data":"d05ae18c8fbee53a494b76ed3979ddadd0d9931543944db95ef605afb977b708"} Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.840576 4911 scope.go:117] "RemoveContainer" containerID="7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.845050 4911 generic.go:334] "Generic (PLEG): container finished" podID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerID="38de83a8b4c0849e5fc22c775d98273066afb0cfda68f4019908328cd6ca6e4c" exitCode=0 Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.845154 4911 generic.go:334] "Generic (PLEG): container finished" podID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerID="ebda6268c12c13b5e1ea97f94d525e908bbc59a59f3205fbdce39c700ec2a200" exitCode=2 Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.845211 4911 generic.go:334] "Generic (PLEG): container finished" podID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerID="5a7ab390c712338daefc738e9555f95982177e9224b524bb4ad2fc68ab4b8236" exitCode=0 Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.845096 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"615c8a3f-a4f7-4017-aa61-d2525148b6cd","Type":"ContainerDied","Data":"38de83a8b4c0849e5fc22c775d98273066afb0cfda68f4019908328cd6ca6e4c"} Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.845950 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"615c8a3f-a4f7-4017-aa61-d2525148b6cd","Type":"ContainerDied","Data":"ebda6268c12c13b5e1ea97f94d525e908bbc59a59f3205fbdce39c700ec2a200"} Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.845984 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"615c8a3f-a4f7-4017-aa61-d2525148b6cd","Type":"ContainerDied","Data":"5a7ab390c712338daefc738e9555f95982177e9224b524bb4ad2fc68ab4b8236"} Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.861139 4911 scope.go:117] "RemoveContainer" containerID="ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.880835 4911 scope.go:117] "RemoveContainer" containerID="7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae" Sep 29 21:42:47 crc kubenswrapper[4911]: E0929 21:42:47.881290 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae\": container with ID starting with 7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae not found: ID does not exist" 
containerID="7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.881328 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae"} err="failed to get container status \"7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae\": rpc error: code = NotFound desc = could not find container \"7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae\": container with ID starting with 7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae not found: ID does not exist" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.881359 4911 scope.go:117] "RemoveContainer" containerID="ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2" Sep 29 21:42:47 crc kubenswrapper[4911]: E0929 21:42:47.881602 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2\": container with ID starting with ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2 not found: ID does not exist" containerID="ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.881656 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2"} err="failed to get container status \"ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2\": rpc error: code = NotFound desc = could not find container \"ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2\": container with ID starting with ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2 not found: ID does not exist" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.881685 4911 scope.go:117] "RemoveContainer" containerID="7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.882250 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.882447 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae"} err="failed to get container status \"7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae\": rpc error: code = NotFound desc = could not find container \"7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae\": container with ID starting with 7ed726dc52735dc226d7e9655508ec48cc1ea1c867059720c80f1512518952ae not found: ID does not exist" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.882475 4911 scope.go:117] "RemoveContainer" containerID="ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.882846 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2"} err="failed to get container status \"ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2\": rpc error: code = NotFound desc = could not find container \"ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2\": container with ID starting with 
ee40c7c0f29713998f365c6970e2cdc2932219b84e34e753ae1c4983cb507fa2 not found: ID does not exist" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.894236 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.905973 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 21:42:47 crc kubenswrapper[4911]: E0929 21:42:47.906455 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72f17fb5-75ee-454c-960c-a5e8471f4113" containerName="cinder-api-log" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.906477 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="72f17fb5-75ee-454c-960c-a5e8471f4113" containerName="cinder-api-log" Sep 29 21:42:47 crc kubenswrapper[4911]: E0929 21:42:47.906490 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72f17fb5-75ee-454c-960c-a5e8471f4113" containerName="cinder-api" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.906497 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="72f17fb5-75ee-454c-960c-a5e8471f4113" containerName="cinder-api" Sep 29 21:42:47 crc kubenswrapper[4911]: E0929 21:42:47.906534 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40533d70-c10c-4b0e-b870-f50e423ccdc5" containerName="barbican-api" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.906540 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="40533d70-c10c-4b0e-b870-f50e423ccdc5" containerName="barbican-api" Sep 29 21:42:47 crc kubenswrapper[4911]: E0929 21:42:47.906557 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40533d70-c10c-4b0e-b870-f50e423ccdc5" containerName="barbican-api-log" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.906564 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="40533d70-c10c-4b0e-b870-f50e423ccdc5" containerName="barbican-api-log" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.906729 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="40533d70-c10c-4b0e-b870-f50e423ccdc5" containerName="barbican-api" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.906748 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="40533d70-c10c-4b0e-b870-f50e423ccdc5" containerName="barbican-api-log" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.906759 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="72f17fb5-75ee-454c-960c-a5e8471f4113" containerName="cinder-api-log" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.906770 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="72f17fb5-75ee-454c-960c-a5e8471f4113" containerName="cinder-api" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.907779 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.911008 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.911168 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.913142 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.915082 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.935756 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c346bcf5-d568-4d43-87ee-e243f8332bcb-logs\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.935820 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-config-data\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.935875 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-config-data-custom\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.935956 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c346bcf5-d568-4d43-87ee-e243f8332bcb-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.935992 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnzgz\" (UniqueName: \"kubernetes.io/projected/c346bcf5-d568-4d43-87ee-e243f8332bcb-kube-api-access-xnzgz\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.936016 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-scripts\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.936037 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-public-tls-certs\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.936058 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:47 crc kubenswrapper[4911]: I0929 21:42:47.936075 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.037872 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c346bcf5-d568-4d43-87ee-e243f8332bcb-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.037973 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnzgz\" (UniqueName: \"kubernetes.io/projected/c346bcf5-d568-4d43-87ee-e243f8332bcb-kube-api-access-xnzgz\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.038028 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-scripts\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.038070 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-public-tls-certs\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.038118 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.038157 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.038226 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c346bcf5-d568-4d43-87ee-e243f8332bcb-logs\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.038266 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-config-data\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.038335 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-config-data-custom\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.039852 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c346bcf5-d568-4d43-87ee-e243f8332bcb-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.040194 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c346bcf5-d568-4d43-87ee-e243f8332bcb-logs\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.043227 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.043568 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-scripts\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.043943 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-public-tls-certs\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.044410 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.045560 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-config-data-custom\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.048679 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c346bcf5-d568-4d43-87ee-e243f8332bcb-config-data\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.062310 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnzgz\" (UniqueName: \"kubernetes.io/projected/c346bcf5-d568-4d43-87ee-e243f8332bcb-kube-api-access-xnzgz\") pod \"cinder-api-0\" (UID: \"c346bcf5-d568-4d43-87ee-e243f8332bcb\") " pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.247098 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.311439 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.715911 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72f17fb5-75ee-454c-960c-a5e8471f4113" path="/var/lib/kubelet/pods/72f17fb5-75ee-454c-960c-a5e8471f4113/volumes" Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.761190 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 21:42:48 crc kubenswrapper[4911]: I0929 21:42:48.864324 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c346bcf5-d568-4d43-87ee-e243f8332bcb","Type":"ContainerStarted","Data":"b231d00079a55f1214619a75ce9f6f4833ac2b941f6fbf5522fcbd5a1cd01382"} Sep 29 21:42:49 crc kubenswrapper[4911]: I0929 21:42:49.278598 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6b4bcd6f7-dzn4f" Sep 29 21:42:49 crc kubenswrapper[4911]: I0929 21:42:49.330835 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5c685495c6-r4w7p"] Sep 29 21:42:49 crc kubenswrapper[4911]: I0929 21:42:49.331113 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5c685495c6-r4w7p" podUID="98530233-359f-4be3-a540-20553e9cbe30" containerName="neutron-api" containerID="cri-o://6fe50da28db4566ab5ceab012dc52bf51b78d4bee83b05f89a556e984c261ccc" gracePeriod=30 Sep 29 21:42:49 crc kubenswrapper[4911]: I0929 21:42:49.331289 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5c685495c6-r4w7p" podUID="98530233-359f-4be3-a540-20553e9cbe30" containerName="neutron-httpd" containerID="cri-o://92dbffd1527696cbd6e1a9801e0678cf1afeda537edb5f16bbf5f17d2583dc27" gracePeriod=30 Sep 29 21:42:49 crc kubenswrapper[4911]: I0929 21:42:49.890961 4911 generic.go:334] "Generic (PLEG): container finished" podID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerID="80e3f75c12263b8d6f8808be94f12fae883645d6ede4499e95330531ed7aa9f2" exitCode=0 Sep 29 21:42:49 crc kubenswrapper[4911]: I0929 21:42:49.891031 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"615c8a3f-a4f7-4017-aa61-d2525148b6cd","Type":"ContainerDied","Data":"80e3f75c12263b8d6f8808be94f12fae883645d6ede4499e95330531ed7aa9f2"} Sep 29 21:42:49 crc kubenswrapper[4911]: I0929 21:42:49.895150 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c346bcf5-d568-4d43-87ee-e243f8332bcb","Type":"ContainerStarted","Data":"d55eb0a986697e992105878d1c7c328ae938a75fca3bdf24de13d0ce8a465400"} Sep 29 21:42:49 crc kubenswrapper[4911]: I0929 21:42:49.902418 4911 generic.go:334] "Generic (PLEG): container finished" podID="98530233-359f-4be3-a540-20553e9cbe30" containerID="92dbffd1527696cbd6e1a9801e0678cf1afeda537edb5f16bbf5f17d2583dc27" exitCode=0 Sep 29 21:42:49 crc kubenswrapper[4911]: I0929 21:42:49.902461 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c685495c6-r4w7p" event={"ID":"98530233-359f-4be3-a540-20553e9cbe30","Type":"ContainerDied","Data":"92dbffd1527696cbd6e1a9801e0678cf1afeda537edb5f16bbf5f17d2583dc27"} Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.214502 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-gc5vr"] Sep 29 
21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.215642 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gc5vr" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.235170 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-gc5vr"] Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.290907 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dk69g\" (UniqueName: \"kubernetes.io/projected/74649df7-81b3-46eb-bd14-0ab5a40d1634-kube-api-access-dk69g\") pod \"nova-api-db-create-gc5vr\" (UID: \"74649df7-81b3-46eb-bd14-0ab5a40d1634\") " pod="openstack/nova-api-db-create-gc5vr" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.303204 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-g8vhl"] Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.304443 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-g8vhl" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.305299 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.339848 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-g8vhl"] Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.392130 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-sg-core-conf-yaml\") pod \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.392201 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-config-data\") pod \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.392246 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcsls\" (UniqueName: \"kubernetes.io/projected/615c8a3f-a4f7-4017-aa61-d2525148b6cd-kube-api-access-zcsls\") pod \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.392323 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-scripts\") pod \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.392354 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/615c8a3f-a4f7-4017-aa61-d2525148b6cd-run-httpd\") pod \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.392401 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/615c8a3f-a4f7-4017-aa61-d2525148b6cd-log-httpd\") pod \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " Sep 29 21:42:50 crc 
kubenswrapper[4911]: I0929 21:42:50.392424 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-combined-ca-bundle\") pod \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\" (UID: \"615c8a3f-a4f7-4017-aa61-d2525148b6cd\") " Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.392737 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkqsg\" (UniqueName: \"kubernetes.io/projected/847ed503-c9fa-4f0b-ac71-6f990bdf2fac-kube-api-access-vkqsg\") pod \"nova-cell0-db-create-g8vhl\" (UID: \"847ed503-c9fa-4f0b-ac71-6f990bdf2fac\") " pod="openstack/nova-cell0-db-create-g8vhl" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.392823 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dk69g\" (UniqueName: \"kubernetes.io/projected/74649df7-81b3-46eb-bd14-0ab5a40d1634-kube-api-access-dk69g\") pod \"nova-api-db-create-gc5vr\" (UID: \"74649df7-81b3-46eb-bd14-0ab5a40d1634\") " pod="openstack/nova-api-db-create-gc5vr" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.394223 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/615c8a3f-a4f7-4017-aa61-d2525148b6cd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "615c8a3f-a4f7-4017-aa61-d2525148b6cd" (UID: "615c8a3f-a4f7-4017-aa61-d2525148b6cd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.395135 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/615c8a3f-a4f7-4017-aa61-d2525148b6cd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "615c8a3f-a4f7-4017-aa61-d2525148b6cd" (UID: "615c8a3f-a4f7-4017-aa61-d2525148b6cd"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.410510 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dk69g\" (UniqueName: \"kubernetes.io/projected/74649df7-81b3-46eb-bd14-0ab5a40d1634-kube-api-access-dk69g\") pod \"nova-api-db-create-gc5vr\" (UID: \"74649df7-81b3-46eb-bd14-0ab5a40d1634\") " pod="openstack/nova-api-db-create-gc5vr" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.410778 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-jg9nq"] Sep 29 21:42:50 crc kubenswrapper[4911]: E0929 21:42:50.411684 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="sg-core" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.411780 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="sg-core" Sep 29 21:42:50 crc kubenswrapper[4911]: E0929 21:42:50.411923 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="ceilometer-notification-agent" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.412001 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="ceilometer-notification-agent" Sep 29 21:42:50 crc kubenswrapper[4911]: E0929 21:42:50.412096 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="ceilometer-central-agent" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.412151 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="ceilometer-central-agent" Sep 29 21:42:50 crc kubenswrapper[4911]: E0929 21:42:50.412206 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="proxy-httpd" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.412256 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="proxy-httpd" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.412569 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="sg-core" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.412637 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="ceilometer-central-agent" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.412709 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="ceilometer-notification-agent" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.412769 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" containerName="proxy-httpd" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.413601 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-jg9nq" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.418416 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-scripts" (OuterVolumeSpecName: "scripts") pod "615c8a3f-a4f7-4017-aa61-d2525148b6cd" (UID: "615c8a3f-a4f7-4017-aa61-d2525148b6cd"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.418970 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/615c8a3f-a4f7-4017-aa61-d2525148b6cd-kube-api-access-zcsls" (OuterVolumeSpecName: "kube-api-access-zcsls") pod "615c8a3f-a4f7-4017-aa61-d2525148b6cd" (UID: "615c8a3f-a4f7-4017-aa61-d2525148b6cd"). InnerVolumeSpecName "kube-api-access-zcsls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.431048 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-jg9nq"] Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.451062 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "615c8a3f-a4f7-4017-aa61-d2525148b6cd" (UID: "615c8a3f-a4f7-4017-aa61-d2525148b6cd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.494391 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8pdv\" (UniqueName: \"kubernetes.io/projected/13437aeb-5dc5-428b-ba77-1265e2f44468-kube-api-access-c8pdv\") pod \"nova-cell1-db-create-jg9nq\" (UID: \"13437aeb-5dc5-428b-ba77-1265e2f44468\") " pod="openstack/nova-cell1-db-create-jg9nq" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.494459 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkqsg\" (UniqueName: \"kubernetes.io/projected/847ed503-c9fa-4f0b-ac71-6f990bdf2fac-kube-api-access-vkqsg\") pod \"nova-cell0-db-create-g8vhl\" (UID: \"847ed503-c9fa-4f0b-ac71-6f990bdf2fac\") " pod="openstack/nova-cell0-db-create-g8vhl" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.494579 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.494590 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/615c8a3f-a4f7-4017-aa61-d2525148b6cd-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.494600 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/615c8a3f-a4f7-4017-aa61-d2525148b6cd-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.494610 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.494619 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcsls\" (UniqueName: \"kubernetes.io/projected/615c8a3f-a4f7-4017-aa61-d2525148b6cd-kube-api-access-zcsls\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.497399 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") 
pod "615c8a3f-a4f7-4017-aa61-d2525148b6cd" (UID: "615c8a3f-a4f7-4017-aa61-d2525148b6cd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.511328 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkqsg\" (UniqueName: \"kubernetes.io/projected/847ed503-c9fa-4f0b-ac71-6f990bdf2fac-kube-api-access-vkqsg\") pod \"nova-cell0-db-create-g8vhl\" (UID: \"847ed503-c9fa-4f0b-ac71-6f990bdf2fac\") " pod="openstack/nova-cell0-db-create-g8vhl" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.523119 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-config-data" (OuterVolumeSpecName: "config-data") pod "615c8a3f-a4f7-4017-aa61-d2525148b6cd" (UID: "615c8a3f-a4f7-4017-aa61-d2525148b6cd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.595989 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8pdv\" (UniqueName: \"kubernetes.io/projected/13437aeb-5dc5-428b-ba77-1265e2f44468-kube-api-access-c8pdv\") pod \"nova-cell1-db-create-jg9nq\" (UID: \"13437aeb-5dc5-428b-ba77-1265e2f44468\") " pod="openstack/nova-cell1-db-create-jg9nq" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.596175 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.596188 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/615c8a3f-a4f7-4017-aa61-d2525148b6cd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.612184 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8pdv\" (UniqueName: \"kubernetes.io/projected/13437aeb-5dc5-428b-ba77-1265e2f44468-kube-api-access-c8pdv\") pod \"nova-cell1-db-create-jg9nq\" (UID: \"13437aeb-5dc5-428b-ba77-1265e2f44468\") " pod="openstack/nova-cell1-db-create-jg9nq" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.620215 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gc5vr" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.629418 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-g8vhl" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.764458 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-jg9nq" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.922629 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c346bcf5-d568-4d43-87ee-e243f8332bcb","Type":"ContainerStarted","Data":"f113f29bebfdbb6132013df3d71f2c9d3acdba37d2aa2b9f08fe48372947c989"} Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.923030 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.927338 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"615c8a3f-a4f7-4017-aa61-d2525148b6cd","Type":"ContainerDied","Data":"9606cbb1f625e7ad8f80a17e1969446b520129278a1972046a109602c3e22b59"} Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.927394 4911 scope.go:117] "RemoveContainer" containerID="38de83a8b4c0849e5fc22c775d98273066afb0cfda68f4019908328cd6ca6e4c" Sep 29 21:42:50 crc kubenswrapper[4911]: I0929 21:42:50.930691 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:50.994780 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.994757783 podStartE2EDuration="3.994757783s" podCreationTimestamp="2025-09-29 21:42:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:42:50.939948293 +0000 UTC m=+1048.917060964" watchObservedRunningTime="2025-09-29 21:42:50.994757783 +0000 UTC m=+1048.971870454" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.083256 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.084430 4911 scope.go:117] "RemoveContainer" containerID="ebda6268c12c13b5e1ea97f94d525e908bbc59a59f3205fbdce39c700ec2a200" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.090664 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.099044 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.101543 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.107702 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.107902 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.116617 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.119867 4911 scope.go:117] "RemoveContainer" containerID="5a7ab390c712338daefc738e9555f95982177e9224b524bb4ad2fc68ab4b8236" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.160097 4911 scope.go:117] "RemoveContainer" containerID="80e3f75c12263b8d6f8808be94f12fae883645d6ede4499e95330531ed7aa9f2" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.215611 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.215749 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4z5p\" (UniqueName: \"kubernetes.io/projected/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-kube-api-access-x4z5p\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.215802 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-config-data\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.215822 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-log-httpd\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.216385 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-scripts\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.217236 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.217312 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-run-httpd\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 
21:42:51.319919 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.319993 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-run-httpd\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.320022 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.320090 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4z5p\" (UniqueName: \"kubernetes.io/projected/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-kube-api-access-x4z5p\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.320162 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-config-data\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.320178 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-log-httpd\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.320268 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-scripts\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.321582 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-run-httpd\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.321924 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-log-httpd\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.336314 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.336566 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-config-data\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.336900 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.340705 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-scripts\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.343140 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4z5p\" (UniqueName: \"kubernetes.io/projected/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-kube-api-access-x4z5p\") pod \"ceilometer-0\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.347117 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-gc5vr"] Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.426590 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-g8vhl"] Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.429905 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.436203 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-jg9nq"] Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.941063 4911 generic.go:334] "Generic (PLEG): container finished" podID="847ed503-c9fa-4f0b-ac71-6f990bdf2fac" containerID="f43a8fac91d8f04fe6616c91fd478bef02733c439ff2910c77d32fbbdd630e32" exitCode=0 Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.942081 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-g8vhl" event={"ID":"847ed503-c9fa-4f0b-ac71-6f990bdf2fac","Type":"ContainerDied","Data":"f43a8fac91d8f04fe6616c91fd478bef02733c439ff2910c77d32fbbdd630e32"} Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.942122 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-g8vhl" event={"ID":"847ed503-c9fa-4f0b-ac71-6f990bdf2fac","Type":"ContainerStarted","Data":"7f5456943b7932b285a86e8602c72842e6c1e77a6dd24260bf5154cbae9465b2"} Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.946103 4911 generic.go:334] "Generic (PLEG): container finished" podID="13437aeb-5dc5-428b-ba77-1265e2f44468" containerID="808ad20d12061e1c2b0ba909bc0fd0424a6636624ba4772feeab19621ae307e1" exitCode=0 Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.946225 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-jg9nq" event={"ID":"13437aeb-5dc5-428b-ba77-1265e2f44468","Type":"ContainerDied","Data":"808ad20d12061e1c2b0ba909bc0fd0424a6636624ba4772feeab19621ae307e1"} Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.946275 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-db-create-jg9nq" event={"ID":"13437aeb-5dc5-428b-ba77-1265e2f44468","Type":"ContainerStarted","Data":"26498a7b8476bbcbf90ae13338dc6e84979ba0f757d497e47fd2081292dc0b97"} Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.955019 4911 generic.go:334] "Generic (PLEG): container finished" podID="74649df7-81b3-46eb-bd14-0ab5a40d1634" containerID="5cd972f82ce850ec1fb9160011aac74fd03f585fa9d927cd89c4be3f1f9f6f7f" exitCode=0 Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.955403 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gc5vr" event={"ID":"74649df7-81b3-46eb-bd14-0ab5a40d1634","Type":"ContainerDied","Data":"5cd972f82ce850ec1fb9160011aac74fd03f585fa9d927cd89c4be3f1f9f6f7f"} Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.955452 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gc5vr" event={"ID":"74649df7-81b3-46eb-bd14-0ab5a40d1634","Type":"ContainerStarted","Data":"06597bdaf200ad63df747dfec2b5975b852ce1210263031a46382fa4218ee770"} Sep 29 21:42:51 crc kubenswrapper[4911]: I0929 21:42:51.966943 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:42:52 crc kubenswrapper[4911]: I0929 21:42:52.714671 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="615c8a3f-a4f7-4017-aa61-d2525148b6cd" path="/var/lib/kubelet/pods/615c8a3f-a4f7-4017-aa61-d2525148b6cd/volumes" Sep 29 21:42:52 crc kubenswrapper[4911]: I0929 21:42:52.966993 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea","Type":"ContainerStarted","Data":"bafe12cca95dec6a89fa926b8bfc5327ea95f7391235f4677746fb91b9f8bbf1"} Sep 29 21:42:52 crc kubenswrapper[4911]: I0929 21:42:52.967301 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea","Type":"ContainerStarted","Data":"b4ccc5d1ac9d6c43623de263bb95f090dd181a4e208532ebddc78b0a9acce160"} Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.433871 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.453324 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-gc5vr" Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.515385 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-jqmrp"] Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.515614 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" podUID="7eac87a1-5eb9-48e6-a3f1-f0a78d494674" containerName="dnsmasq-dns" containerID="cri-o://a59ef7cf8d8801d59cdcbd879a6aed303345bcd05fd7048905075c1614d5deeb" gracePeriod=10 Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.560460 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dk69g\" (UniqueName: \"kubernetes.io/projected/74649df7-81b3-46eb-bd14-0ab5a40d1634-kube-api-access-dk69g\") pod \"74649df7-81b3-46eb-bd14-0ab5a40d1634\" (UID: \"74649df7-81b3-46eb-bd14-0ab5a40d1634\") " Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.565281 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74649df7-81b3-46eb-bd14-0ab5a40d1634-kube-api-access-dk69g" (OuterVolumeSpecName: "kube-api-access-dk69g") pod "74649df7-81b3-46eb-bd14-0ab5a40d1634" (UID: "74649df7-81b3-46eb-bd14-0ab5a40d1634"). InnerVolumeSpecName "kube-api-access-dk69g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.598672 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.671386 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-jg9nq" Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.671747 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dk69g\" (UniqueName: \"kubernetes.io/projected/74649df7-81b3-46eb-bd14-0ab5a40d1634-kube-api-access-dk69g\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.674241 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.674520 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-g8vhl" Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.773241 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8pdv\" (UniqueName: \"kubernetes.io/projected/13437aeb-5dc5-428b-ba77-1265e2f44468-kube-api-access-c8pdv\") pod \"13437aeb-5dc5-428b-ba77-1265e2f44468\" (UID: \"13437aeb-5dc5-428b-ba77-1265e2f44468\") " Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.773368 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkqsg\" (UniqueName: \"kubernetes.io/projected/847ed503-c9fa-4f0b-ac71-6f990bdf2fac-kube-api-access-vkqsg\") pod \"847ed503-c9fa-4f0b-ac71-6f990bdf2fac\" (UID: \"847ed503-c9fa-4f0b-ac71-6f990bdf2fac\") " Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.778731 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13437aeb-5dc5-428b-ba77-1265e2f44468-kube-api-access-c8pdv" (OuterVolumeSpecName: "kube-api-access-c8pdv") pod "13437aeb-5dc5-428b-ba77-1265e2f44468" (UID: "13437aeb-5dc5-428b-ba77-1265e2f44468"). InnerVolumeSpecName "kube-api-access-c8pdv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.780891 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8pdv\" (UniqueName: \"kubernetes.io/projected/13437aeb-5dc5-428b-ba77-1265e2f44468-kube-api-access-c8pdv\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.780443 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/847ed503-c9fa-4f0b-ac71-6f990bdf2fac-kube-api-access-vkqsg" (OuterVolumeSpecName: "kube-api-access-vkqsg") pod "847ed503-c9fa-4f0b-ac71-6f990bdf2fac" (UID: "847ed503-c9fa-4f0b-ac71-6f990bdf2fac"). InnerVolumeSpecName "kube-api-access-vkqsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.883628 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkqsg\" (UniqueName: \"kubernetes.io/projected/847ed503-c9fa-4f0b-ac71-6f990bdf2fac-kube-api-access-vkqsg\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.988541 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gc5vr" Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.989517 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gc5vr" event={"ID":"74649df7-81b3-46eb-bd14-0ab5a40d1634","Type":"ContainerDied","Data":"06597bdaf200ad63df747dfec2b5975b852ce1210263031a46382fa4218ee770"} Sep 29 21:42:53 crc kubenswrapper[4911]: I0929 21:42:53.989556 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06597bdaf200ad63df747dfec2b5975b852ce1210263031a46382fa4218ee770" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.004305 4911 generic.go:334] "Generic (PLEG): container finished" podID="7eac87a1-5eb9-48e6-a3f1-f0a78d494674" containerID="a59ef7cf8d8801d59cdcbd879a6aed303345bcd05fd7048905075c1614d5deeb" exitCode=0 Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.004582 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" event={"ID":"7eac87a1-5eb9-48e6-a3f1-f0a78d494674","Type":"ContainerDied","Data":"a59ef7cf8d8801d59cdcbd879a6aed303345bcd05fd7048905075c1614d5deeb"} Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.017559 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea","Type":"ContainerStarted","Data":"1378dc73e108e9e426635444f00480be775a77507b09467731ca704d8db1a656"} Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.022019 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.024306 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-g8vhl" event={"ID":"847ed503-c9fa-4f0b-ac71-6f990bdf2fac","Type":"ContainerDied","Data":"7f5456943b7932b285a86e8602c72842e6c1e77a6dd24260bf5154cbae9465b2"} Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.024345 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f5456943b7932b285a86e8602c72842e6c1e77a6dd24260bf5154cbae9465b2" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.024402 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-g8vhl" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.026279 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-jg9nq" event={"ID":"13437aeb-5dc5-428b-ba77-1265e2f44468","Type":"ContainerDied","Data":"26498a7b8476bbcbf90ae13338dc6e84979ba0f757d497e47fd2081292dc0b97"} Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.026321 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26498a7b8476bbcbf90ae13338dc6e84979ba0f757d497e47fd2081292dc0b97" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.026343 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="f8ed0335-40f9-4ef6-be8e-8866dce897f4" containerName="cinder-scheduler" containerID="cri-o://8e1bfc56c83547c9483aa11834ae24710ebef24a02162a656011acf68c69df9e" gracePeriod=30 Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.026400 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-jg9nq" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.026466 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="f8ed0335-40f9-4ef6-be8e-8866dce897f4" containerName="probe" containerID="cri-o://f6d9871f360fecc11a527fe734d5f7290dfa4b99ea07897033b8b88ea84086f9" gracePeriod=30 Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.087424 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-config\") pod \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.087475 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-ovsdbserver-nb\") pod \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.087578 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-dns-svc\") pod \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.087634 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-dns-swift-storage-0\") pod \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.087680 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr4b2\" (UniqueName: \"kubernetes.io/projected/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-kube-api-access-sr4b2\") pod \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\" (UID: \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.087817 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-ovsdbserver-sb\") pod \"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\" (UID: 
\"7eac87a1-5eb9-48e6-a3f1-f0a78d494674\") " Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.113555 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-kube-api-access-sr4b2" (OuterVolumeSpecName: "kube-api-access-sr4b2") pod "7eac87a1-5eb9-48e6-a3f1-f0a78d494674" (UID: "7eac87a1-5eb9-48e6-a3f1-f0a78d494674"). InnerVolumeSpecName "kube-api-access-sr4b2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.136654 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7eac87a1-5eb9-48e6-a3f1-f0a78d494674" (UID: "7eac87a1-5eb9-48e6-a3f1-f0a78d494674"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.155611 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-config" (OuterVolumeSpecName: "config") pod "7eac87a1-5eb9-48e6-a3f1-f0a78d494674" (UID: "7eac87a1-5eb9-48e6-a3f1-f0a78d494674"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.168962 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7eac87a1-5eb9-48e6-a3f1-f0a78d494674" (UID: "7eac87a1-5eb9-48e6-a3f1-f0a78d494674"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.184008 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7eac87a1-5eb9-48e6-a3f1-f0a78d494674" (UID: "7eac87a1-5eb9-48e6-a3f1-f0a78d494674"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.190599 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.190831 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.190842 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.190852 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr4b2\" (UniqueName: \"kubernetes.io/projected/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-kube-api-access-sr4b2\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.190859 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.209967 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7eac87a1-5eb9-48e6-a3f1-f0a78d494674" (UID: "7eac87a1-5eb9-48e6-a3f1-f0a78d494674"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:42:54 crc kubenswrapper[4911]: I0929 21:42:54.292114 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7eac87a1-5eb9-48e6-a3f1-f0a78d494674-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:55 crc kubenswrapper[4911]: I0929 21:42:55.042419 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea","Type":"ContainerStarted","Data":"eff1276702b5642b5eaee71d8541b4a6097d7dd7d53af87a8621d5d82a3fd5d9"} Sep 29 21:42:55 crc kubenswrapper[4911]: I0929 21:42:55.046974 4911 generic.go:334] "Generic (PLEG): container finished" podID="f8ed0335-40f9-4ef6-be8e-8866dce897f4" containerID="f6d9871f360fecc11a527fe734d5f7290dfa4b99ea07897033b8b88ea84086f9" exitCode=0 Sep 29 21:42:55 crc kubenswrapper[4911]: I0929 21:42:55.047052 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f8ed0335-40f9-4ef6-be8e-8866dce897f4","Type":"ContainerDied","Data":"f6d9871f360fecc11a527fe734d5f7290dfa4b99ea07897033b8b88ea84086f9"} Sep 29 21:42:55 crc kubenswrapper[4911]: I0929 21:42:55.049831 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" event={"ID":"7eac87a1-5eb9-48e6-a3f1-f0a78d494674","Type":"ContainerDied","Data":"ad864286b8614d0b7c510f5f787578fe5fcd421d2a79e1eb388615b9d3c7e3de"} Sep 29 21:42:55 crc kubenswrapper[4911]: I0929 21:42:55.049863 4911 scope.go:117] "RemoveContainer" containerID="a59ef7cf8d8801d59cdcbd879a6aed303345bcd05fd7048905075c1614d5deeb" Sep 29 21:42:55 crc kubenswrapper[4911]: I0929 21:42:55.049912 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-jqmrp" Sep 29 21:42:55 crc kubenswrapper[4911]: I0929 21:42:55.079195 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-jqmrp"] Sep 29 21:42:55 crc kubenswrapper[4911]: I0929 21:42:55.087721 4911 scope.go:117] "RemoveContainer" containerID="ca08e0396e3f4ef3b97f8c84ed29bceb068d9a4f573bdb2d5fb15a986fa77d89" Sep 29 21:42:55 crc kubenswrapper[4911]: I0929 21:42:55.089591 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-jqmrp"] Sep 29 21:42:55 crc kubenswrapper[4911]: I0929 21:42:55.210747 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:42:55 crc kubenswrapper[4911]: I0929 21:42:55.211089 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.062815 4911 generic.go:334] "Generic (PLEG): container finished" podID="98530233-359f-4be3-a540-20553e9cbe30" containerID="6fe50da28db4566ab5ceab012dc52bf51b78d4bee83b05f89a556e984c261ccc" exitCode=0 Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.063435 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c685495c6-r4w7p" event={"ID":"98530233-359f-4be3-a540-20553e9cbe30","Type":"ContainerDied","Data":"6fe50da28db4566ab5ceab012dc52bf51b78d4bee83b05f89a556e984c261ccc"} Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.067776 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea","Type":"ContainerStarted","Data":"01ccee288586f68231833f8e892104d71e21dcf2c463f1cfe5b71d9c9e41e69e"} Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.068952 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.090569 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.627435143 podStartE2EDuration="5.090544074s" podCreationTimestamp="2025-09-29 21:42:51 +0000 UTC" firstStartedPulling="2025-09-29 21:42:51.952163348 +0000 UTC m=+1049.929276029" lastFinishedPulling="2025-09-29 21:42:55.415272289 +0000 UTC m=+1053.392384960" observedRunningTime="2025-09-29 21:42:56.084406954 +0000 UTC m=+1054.061519625" watchObservedRunningTime="2025-09-29 21:42:56.090544074 +0000 UTC m=+1054.067656755" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.308154 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.325851 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-ovndb-tls-certs\") pod \"98530233-359f-4be3-a540-20553e9cbe30\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.326225 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d77k\" (UniqueName: \"kubernetes.io/projected/98530233-359f-4be3-a540-20553e9cbe30-kube-api-access-4d77k\") pod \"98530233-359f-4be3-a540-20553e9cbe30\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.326269 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-config\") pod \"98530233-359f-4be3-a540-20553e9cbe30\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.326314 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-combined-ca-bundle\") pod \"98530233-359f-4be3-a540-20553e9cbe30\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.326427 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-httpd-config\") pod \"98530233-359f-4be3-a540-20553e9cbe30\" (UID: \"98530233-359f-4be3-a540-20553e9cbe30\") " Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.333029 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "98530233-359f-4be3-a540-20553e9cbe30" (UID: "98530233-359f-4be3-a540-20553e9cbe30"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.335652 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98530233-359f-4be3-a540-20553e9cbe30-kube-api-access-4d77k" (OuterVolumeSpecName: "kube-api-access-4d77k") pod "98530233-359f-4be3-a540-20553e9cbe30" (UID: "98530233-359f-4be3-a540-20553e9cbe30"). InnerVolumeSpecName "kube-api-access-4d77k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.382691 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-config" (OuterVolumeSpecName: "config") pod "98530233-359f-4be3-a540-20553e9cbe30" (UID: "98530233-359f-4be3-a540-20553e9cbe30"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.405940 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "98530233-359f-4be3-a540-20553e9cbe30" (UID: "98530233-359f-4be3-a540-20553e9cbe30"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.407611 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "98530233-359f-4be3-a540-20553e9cbe30" (UID: "98530233-359f-4be3-a540-20553e9cbe30"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.442684 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d77k\" (UniqueName: \"kubernetes.io/projected/98530233-359f-4be3-a540-20553e9cbe30-kube-api-access-4d77k\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.442718 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.442733 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.442743 4911 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.442754 4911 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/98530233-359f-4be3-a540-20553e9cbe30-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:56 crc kubenswrapper[4911]: I0929 21:42:56.712898 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eac87a1-5eb9-48e6-a3f1-f0a78d494674" path="/var/lib/kubelet/pods/7eac87a1-5eb9-48e6-a3f1-f0a78d494674/volumes" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.099619 4911 generic.go:334] "Generic (PLEG): container finished" podID="f8ed0335-40f9-4ef6-be8e-8866dce897f4" containerID="8e1bfc56c83547c9483aa11834ae24710ebef24a02162a656011acf68c69df9e" exitCode=0 Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.099727 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f8ed0335-40f9-4ef6-be8e-8866dce897f4","Type":"ContainerDied","Data":"8e1bfc56c83547c9483aa11834ae24710ebef24a02162a656011acf68c69df9e"} Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.104933 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5c685495c6-r4w7p" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.104976 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c685495c6-r4w7p" event={"ID":"98530233-359f-4be3-a540-20553e9cbe30","Type":"ContainerDied","Data":"75fcaa37844611e956c014d3e83e2c7e8b7d9b7fe235c061621d376be35b8fe7"} Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.105292 4911 scope.go:117] "RemoveContainer" containerID="92dbffd1527696cbd6e1a9801e0678cf1afeda537edb5f16bbf5f17d2583dc27" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.134752 4911 scope.go:117] "RemoveContainer" containerID="6fe50da28db4566ab5ceab012dc52bf51b78d4bee83b05f89a556e984c261ccc" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.138019 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5c685495c6-r4w7p"] Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.144149 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5c685495c6-r4w7p"] Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.286693 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.361338 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-scripts\") pod \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.361439 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-config-data-custom\") pod \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.361466 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-combined-ca-bundle\") pod \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.361489 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-config-data\") pod \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.361582 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f8ed0335-40f9-4ef6-be8e-8866dce897f4-etc-machine-id\") pod \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.361642 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zv8wz\" (UniqueName: \"kubernetes.io/projected/f8ed0335-40f9-4ef6-be8e-8866dce897f4-kube-api-access-zv8wz\") pod \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\" (UID: \"f8ed0335-40f9-4ef6-be8e-8866dce897f4\") " Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.362899 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/f8ed0335-40f9-4ef6-be8e-8866dce897f4-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f8ed0335-40f9-4ef6-be8e-8866dce897f4" (UID: "f8ed0335-40f9-4ef6-be8e-8866dce897f4"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.366693 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ed0335-40f9-4ef6-be8e-8866dce897f4-kube-api-access-zv8wz" (OuterVolumeSpecName: "kube-api-access-zv8wz") pod "f8ed0335-40f9-4ef6-be8e-8866dce897f4" (UID: "f8ed0335-40f9-4ef6-be8e-8866dce897f4"). InnerVolumeSpecName "kube-api-access-zv8wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.368955 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f8ed0335-40f9-4ef6-be8e-8866dce897f4" (UID: "f8ed0335-40f9-4ef6-be8e-8866dce897f4"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.374068 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-scripts" (OuterVolumeSpecName: "scripts") pod "f8ed0335-40f9-4ef6-be8e-8866dce897f4" (UID: "f8ed0335-40f9-4ef6-be8e-8866dce897f4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.434971 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f8ed0335-40f9-4ef6-be8e-8866dce897f4" (UID: "f8ed0335-40f9-4ef6-be8e-8866dce897f4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.463991 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zv8wz\" (UniqueName: \"kubernetes.io/projected/f8ed0335-40f9-4ef6-be8e-8866dce897f4-kube-api-access-zv8wz\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.464028 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.464038 4911 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.464046 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.464054 4911 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f8ed0335-40f9-4ef6-be8e-8866dce897f4-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.480943 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-config-data" (OuterVolumeSpecName: "config-data") pod "f8ed0335-40f9-4ef6-be8e-8866dce897f4" (UID: "f8ed0335-40f9-4ef6-be8e-8866dce897f4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:42:57 crc kubenswrapper[4911]: I0929 21:42:57.565896 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ed0335-40f9-4ef6-be8e-8866dce897f4-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.127172 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.132901 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f8ed0335-40f9-4ef6-be8e-8866dce897f4","Type":"ContainerDied","Data":"9dcd36572dc071d0f9042d4c2390ca3741f65b5cda38c9ee1ce5ef44699a26ce"} Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.132969 4911 scope.go:117] "RemoveContainer" containerID="f6d9871f360fecc11a527fe734d5f7290dfa4b99ea07897033b8b88ea84086f9" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.175905 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.187573 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.196349 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 21:42:58 crc kubenswrapper[4911]: E0929 21:42:58.196740 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74649df7-81b3-46eb-bd14-0ab5a40d1634" containerName="mariadb-database-create" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.196757 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="74649df7-81b3-46eb-bd14-0ab5a40d1634" containerName="mariadb-database-create" Sep 29 21:42:58 crc kubenswrapper[4911]: E0929 21:42:58.196778 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="847ed503-c9fa-4f0b-ac71-6f990bdf2fac" containerName="mariadb-database-create" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.196785 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="847ed503-c9fa-4f0b-ac71-6f990bdf2fac" containerName="mariadb-database-create" Sep 29 21:42:58 crc kubenswrapper[4911]: E0929 21:42:58.196814 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eac87a1-5eb9-48e6-a3f1-f0a78d494674" containerName="dnsmasq-dns" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.196820 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eac87a1-5eb9-48e6-a3f1-f0a78d494674" containerName="dnsmasq-dns" Sep 29 21:42:58 crc kubenswrapper[4911]: E0929 21:42:58.196828 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98530233-359f-4be3-a540-20553e9cbe30" containerName="neutron-api" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.196834 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="98530233-359f-4be3-a540-20553e9cbe30" containerName="neutron-api" Sep 29 21:42:58 crc kubenswrapper[4911]: E0929 21:42:58.196851 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eac87a1-5eb9-48e6-a3f1-f0a78d494674" containerName="init" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.196857 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eac87a1-5eb9-48e6-a3f1-f0a78d494674" containerName="init" Sep 29 21:42:58 crc kubenswrapper[4911]: E0929 21:42:58.196868 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ed0335-40f9-4ef6-be8e-8866dce897f4" containerName="probe" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.196874 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ed0335-40f9-4ef6-be8e-8866dce897f4" containerName="probe" Sep 29 21:42:58 crc kubenswrapper[4911]: E0929 21:42:58.196881 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98530233-359f-4be3-a540-20553e9cbe30" 
containerName="neutron-httpd" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.196888 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="98530233-359f-4be3-a540-20553e9cbe30" containerName="neutron-httpd" Sep 29 21:42:58 crc kubenswrapper[4911]: E0929 21:42:58.196897 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13437aeb-5dc5-428b-ba77-1265e2f44468" containerName="mariadb-database-create" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.196903 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="13437aeb-5dc5-428b-ba77-1265e2f44468" containerName="mariadb-database-create" Sep 29 21:42:58 crc kubenswrapper[4911]: E0929 21:42:58.196913 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ed0335-40f9-4ef6-be8e-8866dce897f4" containerName="cinder-scheduler" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.196918 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ed0335-40f9-4ef6-be8e-8866dce897f4" containerName="cinder-scheduler" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.197387 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="847ed503-c9fa-4f0b-ac71-6f990bdf2fac" containerName="mariadb-database-create" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.197401 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ed0335-40f9-4ef6-be8e-8866dce897f4" containerName="cinder-scheduler" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.197412 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="74649df7-81b3-46eb-bd14-0ab5a40d1634" containerName="mariadb-database-create" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.197423 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="98530233-359f-4be3-a540-20553e9cbe30" containerName="neutron-httpd" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.197432 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="98530233-359f-4be3-a540-20553e9cbe30" containerName="neutron-api" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.197442 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="13437aeb-5dc5-428b-ba77-1265e2f44468" containerName="mariadb-database-create" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.197452 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ed0335-40f9-4ef6-be8e-8866dce897f4" containerName="probe" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.197463 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eac87a1-5eb9-48e6-a3f1-f0a78d494674" containerName="dnsmasq-dns" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.198345 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.198888 4911 scope.go:117] "RemoveContainer" containerID="8e1bfc56c83547c9483aa11834ae24710ebef24a02162a656011acf68c69df9e" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.206799 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.215683 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.279253 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghctw\" (UniqueName: \"kubernetes.io/projected/896c8bf4-4402-448b-867f-ffd69d511949-kube-api-access-ghctw\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.279313 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/896c8bf4-4402-448b-867f-ffd69d511949-config-data\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.279584 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/896c8bf4-4402-448b-867f-ffd69d511949-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.279759 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/896c8bf4-4402-448b-867f-ffd69d511949-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.280011 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/896c8bf4-4402-448b-867f-ffd69d511949-scripts\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.280217 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/896c8bf4-4402-448b-867f-ffd69d511949-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.384261 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/896c8bf4-4402-448b-867f-ffd69d511949-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.384469 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/896c8bf4-4402-448b-867f-ffd69d511949-scripts\") pod \"cinder-scheduler-0\" (UID: 
\"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.384642 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/896c8bf4-4402-448b-867f-ffd69d511949-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.384687 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghctw\" (UniqueName: \"kubernetes.io/projected/896c8bf4-4402-448b-867f-ffd69d511949-kube-api-access-ghctw\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.384742 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/896c8bf4-4402-448b-867f-ffd69d511949-config-data\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.384855 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/896c8bf4-4402-448b-867f-ffd69d511949-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.384937 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/896c8bf4-4402-448b-867f-ffd69d511949-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.390419 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/896c8bf4-4402-448b-867f-ffd69d511949-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.392701 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/896c8bf4-4402-448b-867f-ffd69d511949-scripts\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.402016 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/896c8bf4-4402-448b-867f-ffd69d511949-config-data\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.418711 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/896c8bf4-4402-448b-867f-ffd69d511949-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.422564 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghctw\" (UniqueName: 
\"kubernetes.io/projected/896c8bf4-4402-448b-867f-ffd69d511949-kube-api-access-ghctw\") pod \"cinder-scheduler-0\" (UID: \"896c8bf4-4402-448b-867f-ffd69d511949\") " pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.528450 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.715308 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98530233-359f-4be3-a540-20553e9cbe30" path="/var/lib/kubelet/pods/98530233-359f-4be3-a540-20553e9cbe30/volumes" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.716428 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8ed0335-40f9-4ef6-be8e-8866dce897f4" path="/var/lib/kubelet/pods/f8ed0335-40f9-4ef6-be8e-8866dce897f4/volumes" Sep 29 21:42:58 crc kubenswrapper[4911]: I0929 21:42:58.978922 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 21:42:58 crc kubenswrapper[4911]: W0929 21:42:58.987803 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod896c8bf4_4402_448b_867f_ffd69d511949.slice/crio-e49eea2b024070b49b158c4a1c068fa3d529fd60240b417d5c0c8ced2307ae12 WatchSource:0}: Error finding container e49eea2b024070b49b158c4a1c068fa3d529fd60240b417d5c0c8ced2307ae12: Status 404 returned error can't find the container with id e49eea2b024070b49b158c4a1c068fa3d529fd60240b417d5c0c8ced2307ae12 Sep 29 21:42:59 crc kubenswrapper[4911]: I0929 21:42:59.135128 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"896c8bf4-4402-448b-867f-ffd69d511949","Type":"ContainerStarted","Data":"e49eea2b024070b49b158c4a1c068fa3d529fd60240b417d5c0c8ced2307ae12"} Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.058198 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.154306 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"896c8bf4-4402-448b-867f-ffd69d511949","Type":"ContainerStarted","Data":"1f66ecc10ee441c9913230c2fbaf66f288afa8640a18866b4e0a2d8de997aa5f"} Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.288437 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.290056 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ba309870-1c87-422f-93c3-81e704ee754e" containerName="glance-log" containerID="cri-o://019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0" gracePeriod=30 Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.290152 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ba309870-1c87-422f-93c3-81e704ee754e" containerName="glance-httpd" containerID="cri-o://9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6" gracePeriod=30 Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.411403 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-7380-account-create-cr8m4"] Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.412453 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-7380-account-create-cr8m4" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.413921 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.425358 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-7380-account-create-cr8m4"] Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.526543 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rfxj\" (UniqueName: \"kubernetes.io/projected/754ff701-048b-42ea-a812-f54def8ad721-kube-api-access-7rfxj\") pod \"nova-api-7380-account-create-cr8m4\" (UID: \"754ff701-048b-42ea-a812-f54def8ad721\") " pod="openstack/nova-api-7380-account-create-cr8m4" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.614493 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-aecd-account-create-mpdgf"] Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.615659 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-aecd-account-create-mpdgf" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.617285 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.628248 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rfxj\" (UniqueName: \"kubernetes.io/projected/754ff701-048b-42ea-a812-f54def8ad721-kube-api-access-7rfxj\") pod \"nova-api-7380-account-create-cr8m4\" (UID: \"754ff701-048b-42ea-a812-f54def8ad721\") " pod="openstack/nova-api-7380-account-create-cr8m4" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.632316 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-aecd-account-create-mpdgf"] Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.662019 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rfxj\" (UniqueName: \"kubernetes.io/projected/754ff701-048b-42ea-a812-f54def8ad721-kube-api-access-7rfxj\") pod \"nova-api-7380-account-create-cr8m4\" (UID: \"754ff701-048b-42ea-a812-f54def8ad721\") " pod="openstack/nova-api-7380-account-create-cr8m4" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.729780 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9q78p\" (UniqueName: \"kubernetes.io/projected/ea773222-bf22-4cb1-b26d-0ec95c9ef332-kube-api-access-9q78p\") pod \"nova-cell0-aecd-account-create-mpdgf\" (UID: \"ea773222-bf22-4cb1-b26d-0ec95c9ef332\") " pod="openstack/nova-cell0-aecd-account-create-mpdgf" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.758527 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-7380-account-create-cr8m4" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.812849 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-f4e5-account-create-4v4cn"] Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.814290 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-f4e5-account-create-4v4cn" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.816569 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.828538 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f4e5-account-create-4v4cn"] Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.831338 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9q78p\" (UniqueName: \"kubernetes.io/projected/ea773222-bf22-4cb1-b26d-0ec95c9ef332-kube-api-access-9q78p\") pod \"nova-cell0-aecd-account-create-mpdgf\" (UID: \"ea773222-bf22-4cb1-b26d-0ec95c9ef332\") " pod="openstack/nova-cell0-aecd-account-create-mpdgf" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.872918 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9q78p\" (UniqueName: \"kubernetes.io/projected/ea773222-bf22-4cb1-b26d-0ec95c9ef332-kube-api-access-9q78p\") pod \"nova-cell0-aecd-account-create-mpdgf\" (UID: \"ea773222-bf22-4cb1-b26d-0ec95c9ef332\") " pod="openstack/nova-cell0-aecd-account-create-mpdgf" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.959766 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-aecd-account-create-mpdgf" Sep 29 21:43:00 crc kubenswrapper[4911]: I0929 21:43:00.960413 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcldl\" (UniqueName: \"kubernetes.io/projected/f943f20f-f0a2-4eb9-96b4-7bffe152853d-kube-api-access-qcldl\") pod \"nova-cell1-f4e5-account-create-4v4cn\" (UID: \"f943f20f-f0a2-4eb9-96b4-7bffe152853d\") " pod="openstack/nova-cell1-f4e5-account-create-4v4cn" Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.062125 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcldl\" (UniqueName: \"kubernetes.io/projected/f943f20f-f0a2-4eb9-96b4-7bffe152853d-kube-api-access-qcldl\") pod \"nova-cell1-f4e5-account-create-4v4cn\" (UID: \"f943f20f-f0a2-4eb9-96b4-7bffe152853d\") " pod="openstack/nova-cell1-f4e5-account-create-4v4cn" Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.081481 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcldl\" (UniqueName: \"kubernetes.io/projected/f943f20f-f0a2-4eb9-96b4-7bffe152853d-kube-api-access-qcldl\") pod \"nova-cell1-f4e5-account-create-4v4cn\" (UID: \"f943f20f-f0a2-4eb9-96b4-7bffe152853d\") " pod="openstack/nova-cell1-f4e5-account-create-4v4cn" Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.166145 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"896c8bf4-4402-448b-867f-ffd69d511949","Type":"ContainerStarted","Data":"30cf31f37b8940e9677c3d30fa23c124c7bddfdf20b7b6ef94c5871117eda9c3"} Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.166452 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-f4e5-account-create-4v4cn" Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.187766 4911 generic.go:334] "Generic (PLEG): container finished" podID="ba309870-1c87-422f-93c3-81e704ee754e" containerID="019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0" exitCode=143 Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.187824 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ba309870-1c87-422f-93c3-81e704ee754e","Type":"ContainerDied","Data":"019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0"} Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.189380 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.189344519 podStartE2EDuration="3.189344519s" podCreationTimestamp="2025-09-29 21:42:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:43:01.185775778 +0000 UTC m=+1059.162888449" watchObservedRunningTime="2025-09-29 21:43:01.189344519 +0000 UTC m=+1059.166457190" Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.307586 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-7380-account-create-cr8m4"] Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.409692 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.410200 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="77d2a08f-5a1a-4847-81a8-a160afadf6aa" containerName="glance-log" containerID="cri-o://a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f" gracePeriod=30 Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.410578 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="77d2a08f-5a1a-4847-81a8-a160afadf6aa" containerName="glance-httpd" containerID="cri-o://611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2" gracePeriod=30 Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.483491 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-aecd-account-create-mpdgf"] Sep 29 21:43:01 crc kubenswrapper[4911]: W0929 21:43:01.496471 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea773222_bf22_4cb1_b26d_0ec95c9ef332.slice/crio-b8cd9a88278da1d48329f70312a5f1d0c534675a02482a6f365275d732c5a354 WatchSource:0}: Error finding container b8cd9a88278da1d48329f70312a5f1d0c534675a02482a6f365275d732c5a354: Status 404 returned error can't find the container with id b8cd9a88278da1d48329f70312a5f1d0c534675a02482a6f365275d732c5a354 Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.672524 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f4e5-account-create-4v4cn"] Sep 29 21:43:01 crc kubenswrapper[4911]: W0929 21:43:01.676321 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf943f20f_f0a2_4eb9_96b4_7bffe152853d.slice/crio-6ad228a7a4a013d056df721ac8c51755e89fd1147808a2eaa970013bcc230af2 WatchSource:0}: Error finding container 
6ad228a7a4a013d056df721ac8c51755e89fd1147808a2eaa970013bcc230af2: Status 404 returned error can't find the container with id 6ad228a7a4a013d056df721ac8c51755e89fd1147808a2eaa970013bcc230af2 Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.950923 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.951316 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="ceilometer-central-agent" containerID="cri-o://bafe12cca95dec6a89fa926b8bfc5327ea95f7391235f4677746fb91b9f8bbf1" gracePeriod=30 Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.951630 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="proxy-httpd" containerID="cri-o://01ccee288586f68231833f8e892104d71e21dcf2c463f1cfe5b71d9c9e41e69e" gracePeriod=30 Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.951839 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="sg-core" containerID="cri-o://eff1276702b5642b5eaee71d8541b4a6097d7dd7d53af87a8621d5d82a3fd5d9" gracePeriod=30 Sep 29 21:43:01 crc kubenswrapper[4911]: I0929 21:43:01.951916 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="ceilometer-notification-agent" containerID="cri-o://1378dc73e108e9e426635444f00480be775a77507b09467731ca704d8db1a656" gracePeriod=30 Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.201889 4911 generic.go:334] "Generic (PLEG): container finished" podID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerID="01ccee288586f68231833f8e892104d71e21dcf2c463f1cfe5b71d9c9e41e69e" exitCode=0 Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.201920 4911 generic.go:334] "Generic (PLEG): container finished" podID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerID="eff1276702b5642b5eaee71d8541b4a6097d7dd7d53af87a8621d5d82a3fd5d9" exitCode=2 Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.201955 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea","Type":"ContainerDied","Data":"01ccee288586f68231833f8e892104d71e21dcf2c463f1cfe5b71d9c9e41e69e"} Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.201979 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea","Type":"ContainerDied","Data":"eff1276702b5642b5eaee71d8541b4a6097d7dd7d53af87a8621d5d82a3fd5d9"} Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.203573 4911 generic.go:334] "Generic (PLEG): container finished" podID="ea773222-bf22-4cb1-b26d-0ec95c9ef332" containerID="57d09cbec1d9733080d78c392e2828679789417b7981b6e0150ed1afa12353c0" exitCode=0 Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.203609 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-aecd-account-create-mpdgf" event={"ID":"ea773222-bf22-4cb1-b26d-0ec95c9ef332","Type":"ContainerDied","Data":"57d09cbec1d9733080d78c392e2828679789417b7981b6e0150ed1afa12353c0"} Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.203624 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-aecd-account-create-mpdgf" event={"ID":"ea773222-bf22-4cb1-b26d-0ec95c9ef332","Type":"ContainerStarted","Data":"b8cd9a88278da1d48329f70312a5f1d0c534675a02482a6f365275d732c5a354"} Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.205954 4911 generic.go:334] "Generic (PLEG): container finished" podID="77d2a08f-5a1a-4847-81a8-a160afadf6aa" containerID="a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f" exitCode=143 Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.205975 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"77d2a08f-5a1a-4847-81a8-a160afadf6aa","Type":"ContainerDied","Data":"a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f"} Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.207583 4911 generic.go:334] "Generic (PLEG): container finished" podID="754ff701-048b-42ea-a812-f54def8ad721" containerID="4e552f7c86495e355ec94799db5ab2105eb24ef97acbf33c9055deb002800d6c" exitCode=0 Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.207649 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-7380-account-create-cr8m4" event={"ID":"754ff701-048b-42ea-a812-f54def8ad721","Type":"ContainerDied","Data":"4e552f7c86495e355ec94799db5ab2105eb24ef97acbf33c9055deb002800d6c"} Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.207669 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-7380-account-create-cr8m4" event={"ID":"754ff701-048b-42ea-a812-f54def8ad721","Type":"ContainerStarted","Data":"b1c3ef9c7445f39d02161894f1a50a798c80f3f90ca945c0654b0f34af5821df"} Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.209065 4911 generic.go:334] "Generic (PLEG): container finished" podID="f943f20f-f0a2-4eb9-96b4-7bffe152853d" containerID="7bec0c2668962cc66c93bd3c318f7194f37988be3f9628ffe93b22e45c485ab3" exitCode=0 Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.209143 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f4e5-account-create-4v4cn" event={"ID":"f943f20f-f0a2-4eb9-96b4-7bffe152853d","Type":"ContainerDied","Data":"7bec0c2668962cc66c93bd3c318f7194f37988be3f9628ffe93b22e45c485ab3"} Sep 29 21:43:02 crc kubenswrapper[4911]: I0929 21:43:02.209177 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f4e5-account-create-4v4cn" event={"ID":"f943f20f-f0a2-4eb9-96b4-7bffe152853d","Type":"ContainerStarted","Data":"6ad228a7a4a013d056df721ac8c51755e89fd1147808a2eaa970013bcc230af2"} Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.227163 4911 generic.go:334] "Generic (PLEG): container finished" podID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerID="1378dc73e108e9e426635444f00480be775a77507b09467731ca704d8db1a656" exitCode=0 Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.227454 4911 generic.go:334] "Generic (PLEG): container finished" podID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerID="bafe12cca95dec6a89fa926b8bfc5327ea95f7391235f4677746fb91b9f8bbf1" exitCode=0 Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.227251 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea","Type":"ContainerDied","Data":"1378dc73e108e9e426635444f00480be775a77507b09467731ca704d8db1a656"} Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.227650 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea","Type":"ContainerDied","Data":"bafe12cca95dec6a89fa926b8bfc5327ea95f7391235f4677746fb91b9f8bbf1"} Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.529137 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.613938 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.727617 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-config-data\") pod \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.727651 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-sg-core-conf-yaml\") pod \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.727741 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4z5p\" (UniqueName: \"kubernetes.io/projected/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-kube-api-access-x4z5p\") pod \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.727764 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-scripts\") pod \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.731576 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-7380-account-create-cr8m4" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.732186 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-combined-ca-bundle\") pod \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.732270 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-run-httpd\") pod \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.732303 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-log-httpd\") pod \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\" (UID: \"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea\") " Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.733550 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" (UID: "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.735884 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-scripts" (OuterVolumeSpecName: "scripts") pod "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" (UID: "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.737951 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" (UID: "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.738939 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-kube-api-access-x4z5p" (OuterVolumeSpecName: "kube-api-access-x4z5p") pod "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" (UID: "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea"). InnerVolumeSpecName "kube-api-access-x4z5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.743300 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-aecd-account-create-mpdgf" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.744345 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f4e5-account-create-4v4cn" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.786998 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" (UID: "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.824229 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" (UID: "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.833571 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9q78p\" (UniqueName: \"kubernetes.io/projected/ea773222-bf22-4cb1-b26d-0ec95c9ef332-kube-api-access-9q78p\") pod \"ea773222-bf22-4cb1-b26d-0ec95c9ef332\" (UID: \"ea773222-bf22-4cb1-b26d-0ec95c9ef332\") " Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.833642 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcldl\" (UniqueName: \"kubernetes.io/projected/f943f20f-f0a2-4eb9-96b4-7bffe152853d-kube-api-access-qcldl\") pod \"f943f20f-f0a2-4eb9-96b4-7bffe152853d\" (UID: \"f943f20f-f0a2-4eb9-96b4-7bffe152853d\") " Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.833713 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rfxj\" (UniqueName: \"kubernetes.io/projected/754ff701-048b-42ea-a812-f54def8ad721-kube-api-access-7rfxj\") pod \"754ff701-048b-42ea-a812-f54def8ad721\" (UID: \"754ff701-048b-42ea-a812-f54def8ad721\") " Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.834183 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.834196 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4z5p\" (UniqueName: \"kubernetes.io/projected/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-kube-api-access-x4z5p\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.834208 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.834216 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.834224 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.834238 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.837467 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f943f20f-f0a2-4eb9-96b4-7bffe152853d-kube-api-access-qcldl" (OuterVolumeSpecName: "kube-api-access-qcldl") pod "f943f20f-f0a2-4eb9-96b4-7bffe152853d" (UID: "f943f20f-f0a2-4eb9-96b4-7bffe152853d"). InnerVolumeSpecName "kube-api-access-qcldl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.840062 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea773222-bf22-4cb1-b26d-0ec95c9ef332-kube-api-access-9q78p" (OuterVolumeSpecName: "kube-api-access-9q78p") pod "ea773222-bf22-4cb1-b26d-0ec95c9ef332" (UID: "ea773222-bf22-4cb1-b26d-0ec95c9ef332"). InnerVolumeSpecName "kube-api-access-9q78p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.840418 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/754ff701-048b-42ea-a812-f54def8ad721-kube-api-access-7rfxj" (OuterVolumeSpecName: "kube-api-access-7rfxj") pod "754ff701-048b-42ea-a812-f54def8ad721" (UID: "754ff701-048b-42ea-a812-f54def8ad721"). InnerVolumeSpecName "kube-api-access-7rfxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.846939 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-config-data" (OuterVolumeSpecName: "config-data") pod "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" (UID: "0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.935981 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rfxj\" (UniqueName: \"kubernetes.io/projected/754ff701-048b-42ea-a812-f54def8ad721-kube-api-access-7rfxj\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.936238 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.936248 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9q78p\" (UniqueName: \"kubernetes.io/projected/ea773222-bf22-4cb1-b26d-0ec95c9ef332-kube-api-access-9q78p\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.936257 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcldl\" (UniqueName: \"kubernetes.io/projected/f943f20f-f0a2-4eb9-96b4-7bffe152853d-kube-api-access-qcldl\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:03 crc kubenswrapper[4911]: I0929 21:43:03.993653 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.139831 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-public-tls-certs\") pod \"ba309870-1c87-422f-93c3-81e704ee754e\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.139908 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba309870-1c87-422f-93c3-81e704ee754e-logs\") pod \"ba309870-1c87-422f-93c3-81e704ee754e\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.139932 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ba309870-1c87-422f-93c3-81e704ee754e\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.139953 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-config-data\") pod \"ba309870-1c87-422f-93c3-81e704ee754e\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.139988 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-scripts\") pod \"ba309870-1c87-422f-93c3-81e704ee754e\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.140005 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ba309870-1c87-422f-93c3-81e704ee754e-httpd-run\") pod \"ba309870-1c87-422f-93c3-81e704ee754e\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.140067 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-combined-ca-bundle\") pod \"ba309870-1c87-422f-93c3-81e704ee754e\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.140109 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgwbt\" (UniqueName: \"kubernetes.io/projected/ba309870-1c87-422f-93c3-81e704ee754e-kube-api-access-pgwbt\") pod \"ba309870-1c87-422f-93c3-81e704ee754e\" (UID: \"ba309870-1c87-422f-93c3-81e704ee754e\") " Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.141957 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba309870-1c87-422f-93c3-81e704ee754e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ba309870-1c87-422f-93c3-81e704ee754e" (UID: "ba309870-1c87-422f-93c3-81e704ee754e"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.142431 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba309870-1c87-422f-93c3-81e704ee754e-logs" (OuterVolumeSpecName: "logs") pod "ba309870-1c87-422f-93c3-81e704ee754e" (UID: "ba309870-1c87-422f-93c3-81e704ee754e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.144227 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba309870-1c87-422f-93c3-81e704ee754e-kube-api-access-pgwbt" (OuterVolumeSpecName: "kube-api-access-pgwbt") pod "ba309870-1c87-422f-93c3-81e704ee754e" (UID: "ba309870-1c87-422f-93c3-81e704ee754e"). InnerVolumeSpecName "kube-api-access-pgwbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.157903 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-scripts" (OuterVolumeSpecName: "scripts") pod "ba309870-1c87-422f-93c3-81e704ee754e" (UID: "ba309870-1c87-422f-93c3-81e704ee754e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.160603 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "ba309870-1c87-422f-93c3-81e704ee754e" (UID: "ba309870-1c87-422f-93c3-81e704ee754e"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.168859 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ba309870-1c87-422f-93c3-81e704ee754e" (UID: "ba309870-1c87-422f-93c3-81e704ee754e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.190646 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ba309870-1c87-422f-93c3-81e704ee754e" (UID: "ba309870-1c87-422f-93c3-81e704ee754e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.194383 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-config-data" (OuterVolumeSpecName: "config-data") pod "ba309870-1c87-422f-93c3-81e704ee754e" (UID: "ba309870-1c87-422f-93c3-81e704ee754e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.244117 4911 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.244154 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba309870-1c87-422f-93c3-81e704ee754e-logs\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.244186 4911 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.244198 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.244209 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.244219 4911 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ba309870-1c87-422f-93c3-81e704ee754e-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.244229 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba309870-1c87-422f-93c3-81e704ee754e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.244279 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgwbt\" (UniqueName: \"kubernetes.io/projected/ba309870-1c87-422f-93c3-81e704ee754e-kube-api-access-pgwbt\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.247645 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea","Type":"ContainerDied","Data":"b4ccc5d1ac9d6c43623de263bb95f090dd181a4e208532ebddc78b0a9acce160"} Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.247710 4911 scope.go:117] "RemoveContainer" containerID="01ccee288586f68231833f8e892104d71e21dcf2c463f1cfe5b71d9c9e41e69e" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.247846 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.257178 4911 generic.go:334] "Generic (PLEG): container finished" podID="ba309870-1c87-422f-93c3-81e704ee754e" containerID="9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6" exitCode=0 Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.257324 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.257383 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ba309870-1c87-422f-93c3-81e704ee754e","Type":"ContainerDied","Data":"9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6"} Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.257420 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ba309870-1c87-422f-93c3-81e704ee754e","Type":"ContainerDied","Data":"e72b896eb112745aa52f575128e79d4b4f2e335fee7b8ec49ed07be101e8d452"} Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.259209 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-aecd-account-create-mpdgf" event={"ID":"ea773222-bf22-4cb1-b26d-0ec95c9ef332","Type":"ContainerDied","Data":"b8cd9a88278da1d48329f70312a5f1d0c534675a02482a6f365275d732c5a354"} Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.259306 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8cd9a88278da1d48329f70312a5f1d0c534675a02482a6f365275d732c5a354" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.259411 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-aecd-account-create-mpdgf" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.264555 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-7380-account-create-cr8m4" event={"ID":"754ff701-048b-42ea-a812-f54def8ad721","Type":"ContainerDied","Data":"b1c3ef9c7445f39d02161894f1a50a798c80f3f90ca945c0654b0f34af5821df"} Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.264574 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-7380-account-create-cr8m4" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.264588 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1c3ef9c7445f39d02161894f1a50a798c80f3f90ca945c0654b0f34af5821df" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.267033 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f4e5-account-create-4v4cn" event={"ID":"f943f20f-f0a2-4eb9-96b4-7bffe152853d","Type":"ContainerDied","Data":"6ad228a7a4a013d056df721ac8c51755e89fd1147808a2eaa970013bcc230af2"} Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.267084 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ad228a7a4a013d056df721ac8c51755e89fd1147808a2eaa970013bcc230af2" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.267094 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-f4e5-account-create-4v4cn" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.288992 4911 scope.go:117] "RemoveContainer" containerID="eff1276702b5642b5eaee71d8541b4a6097d7dd7d53af87a8621d5d82a3fd5d9" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.300472 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.338345 4911 scope.go:117] "RemoveContainer" containerID="1378dc73e108e9e426635444f00480be775a77507b09467731ca704d8db1a656" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.341218 4911 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.350856 4911 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.379886 4911 scope.go:117] "RemoveContainer" containerID="bafe12cca95dec6a89fa926b8bfc5327ea95f7391235f4677746fb91b9f8bbf1" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.399297 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.423186 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.425383 4911 scope.go:117] "RemoveContainer" containerID="9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.430915 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.438913 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:43:04 crc kubenswrapper[4911]: E0929 21:43:04.439356 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="754ff701-048b-42ea-a812-f54def8ad721" containerName="mariadb-account-create" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439380 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="754ff701-048b-42ea-a812-f54def8ad721" containerName="mariadb-account-create" Sep 29 21:43:04 crc kubenswrapper[4911]: E0929 21:43:04.439398 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="ceilometer-notification-agent" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439407 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="ceilometer-notification-agent" Sep 29 21:43:04 crc kubenswrapper[4911]: E0929 21:43:04.439419 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba309870-1c87-422f-93c3-81e704ee754e" containerName="glance-log" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439426 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba309870-1c87-422f-93c3-81e704ee754e" containerName="glance-log" Sep 29 21:43:04 crc kubenswrapper[4911]: E0929 21:43:04.439441 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f943f20f-f0a2-4eb9-96b4-7bffe152853d" containerName="mariadb-account-create" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439450 4911 
state_mem.go:107] "Deleted CPUSet assignment" podUID="f943f20f-f0a2-4eb9-96b4-7bffe152853d" containerName="mariadb-account-create" Sep 29 21:43:04 crc kubenswrapper[4911]: E0929 21:43:04.439464 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="ceilometer-central-agent" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439473 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="ceilometer-central-agent" Sep 29 21:43:04 crc kubenswrapper[4911]: E0929 21:43:04.439494 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba309870-1c87-422f-93c3-81e704ee754e" containerName="glance-httpd" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439502 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba309870-1c87-422f-93c3-81e704ee754e" containerName="glance-httpd" Sep 29 21:43:04 crc kubenswrapper[4911]: E0929 21:43:04.439522 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="proxy-httpd" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439529 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="proxy-httpd" Sep 29 21:43:04 crc kubenswrapper[4911]: E0929 21:43:04.439547 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea773222-bf22-4cb1-b26d-0ec95c9ef332" containerName="mariadb-account-create" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439554 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea773222-bf22-4cb1-b26d-0ec95c9ef332" containerName="mariadb-account-create" Sep 29 21:43:04 crc kubenswrapper[4911]: E0929 21:43:04.439573 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="sg-core" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439580 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="sg-core" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439774 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba309870-1c87-422f-93c3-81e704ee754e" containerName="glance-log" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439805 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="754ff701-048b-42ea-a812-f54def8ad721" containerName="mariadb-account-create" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439822 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="sg-core" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439842 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="ceilometer-central-agent" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439860 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f943f20f-f0a2-4eb9-96b4-7bffe152853d" containerName="mariadb-account-create" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439873 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="proxy-httpd" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439889 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba309870-1c87-422f-93c3-81e704ee754e" containerName="glance-httpd" Sep 29 
21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439901 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea773222-bf22-4cb1-b26d-0ec95c9ef332" containerName="mariadb-account-create" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.439914 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" containerName="ceilometer-notification-agent" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.443425 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.444656 4911 scope.go:117] "RemoveContainer" containerID="019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.445442 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.447910 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.448636 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.459157 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.460674 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.463530 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.463783 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.468055 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.477861 4911 scope.go:117] "RemoveContainer" containerID="9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6" Sep 29 21:43:04 crc kubenswrapper[4911]: E0929 21:43:04.478168 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6\": container with ID starting with 9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6 not found: ID does not exist" containerID="9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.478195 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6"} err="failed to get container status \"9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6\": rpc error: code = NotFound desc = could not find container \"9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6\": container with ID starting with 9c1c93dd23664949239ecc828a60ae4e9951dc1c1d9b1b6819469a961e9c30b6 not found: ID does not exist" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.478217 4911 scope.go:117] "RemoveContainer" containerID="019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0" Sep 29 21:43:04 crc 
kubenswrapper[4911]: E0929 21:43:04.478414 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0\": container with ID starting with 019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0 not found: ID does not exist" containerID="019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.478441 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0"} err="failed to get container status \"019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0\": rpc error: code = NotFound desc = could not find container \"019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0\": container with ID starting with 019bfc1cc71a4b337fe89740955a869fc45c8db87475d1e1d8f451112efe7ef0 not found: ID does not exist" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554269 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-scripts\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554338 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-scripts\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554389 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554423 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-log-httpd\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554455 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554477 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-run-httpd\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554507 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554530 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-config-data\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554562 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-config-data\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554587 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554620 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lr26\" (UniqueName: \"kubernetes.io/projected/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-kube-api-access-8lr26\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554664 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554690 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554726 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-logs\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.554748 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfpxb\" (UniqueName: \"kubernetes.io/projected/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-kube-api-access-rfpxb\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.656503 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.656772 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-config-data\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.656823 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-config-data\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.656843 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.656862 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lr26\" (UniqueName: \"kubernetes.io/projected/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-kube-api-access-8lr26\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.656909 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.656929 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.656975 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-logs\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.656995 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfpxb\" (UniqueName: \"kubernetes.io/projected/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-kube-api-access-rfpxb\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.657062 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-scripts\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 
21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.657090 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-scripts\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.657129 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.657149 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-log-httpd\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.657157 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.657170 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.657476 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-run-httpd\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.657989 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-run-httpd\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.658449 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-logs\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.658727 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.659271 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-log-httpd\") pod 
\"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.663454 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.663487 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-scripts\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.663639 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-config-data\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.663785 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.665673 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-scripts\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.675464 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfpxb\" (UniqueName: \"kubernetes.io/projected/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-kube-api-access-rfpxb\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.675498 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lr26\" (UniqueName: \"kubernetes.io/projected/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-kube-api-access-8lr26\") pod \"ceilometer-0\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") " pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.675585 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.676336 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.679024 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4-config-data\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.691176 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4\") " pod="openstack/glance-default-external-api-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.719236 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea" path="/var/lib/kubelet/pods/0380a0bf-646d-4dc6-90e1-7d3b5a21b9ea/volumes" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.720199 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba309870-1c87-422f-93c3-81e704ee754e" path="/var/lib/kubelet/pods/ba309870-1c87-422f-93c3-81e704ee754e/volumes" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.763320 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:43:04 crc kubenswrapper[4911]: I0929 21:43:04.775192 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.021505 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.063476 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77d2a08f-5a1a-4847-81a8-a160afadf6aa-logs\") pod \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.063553 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjcds\" (UniqueName: \"kubernetes.io/projected/77d2a08f-5a1a-4847-81a8-a160afadf6aa-kube-api-access-gjcds\") pod \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.063653 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-config-data\") pod \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.063696 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-combined-ca-bundle\") pod \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.063843 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-scripts\") pod \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.063884 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") pod \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.063914 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-internal-tls-certs\") pod \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.063960 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/77d2a08f-5a1a-4847-81a8-a160afadf6aa-httpd-run\") pod \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\" (UID: \"77d2a08f-5a1a-4847-81a8-a160afadf6aa\") " Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.065391 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77d2a08f-5a1a-4847-81a8-a160afadf6aa-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "77d2a08f-5a1a-4847-81a8-a160afadf6aa" (UID: "77d2a08f-5a1a-4847-81a8-a160afadf6aa"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.069491 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-scripts" (OuterVolumeSpecName: "scripts") pod "77d2a08f-5a1a-4847-81a8-a160afadf6aa" (UID: "77d2a08f-5a1a-4847-81a8-a160afadf6aa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.070056 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77d2a08f-5a1a-4847-81a8-a160afadf6aa-logs" (OuterVolumeSpecName: "logs") pod "77d2a08f-5a1a-4847-81a8-a160afadf6aa" (UID: "77d2a08f-5a1a-4847-81a8-a160afadf6aa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.073604 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "77d2a08f-5a1a-4847-81a8-a160afadf6aa" (UID: "77d2a08f-5a1a-4847-81a8-a160afadf6aa"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.073850 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77d2a08f-5a1a-4847-81a8-a160afadf6aa-kube-api-access-gjcds" (OuterVolumeSpecName: "kube-api-access-gjcds") pod "77d2a08f-5a1a-4847-81a8-a160afadf6aa" (UID: "77d2a08f-5a1a-4847-81a8-a160afadf6aa"). InnerVolumeSpecName "kube-api-access-gjcds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.103877 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "77d2a08f-5a1a-4847-81a8-a160afadf6aa" (UID: "77d2a08f-5a1a-4847-81a8-a160afadf6aa"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.122291 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-config-data" (OuterVolumeSpecName: "config-data") pod "77d2a08f-5a1a-4847-81a8-a160afadf6aa" (UID: "77d2a08f-5a1a-4847-81a8-a160afadf6aa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.124722 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "77d2a08f-5a1a-4847-81a8-a160afadf6aa" (UID: "77d2a08f-5a1a-4847-81a8-a160afadf6aa"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.166642 4911 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/77d2a08f-5a1a-4847-81a8-a160afadf6aa-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.166669 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77d2a08f-5a1a-4847-81a8-a160afadf6aa-logs\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.166679 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjcds\" (UniqueName: \"kubernetes.io/projected/77d2a08f-5a1a-4847-81a8-a160afadf6aa-kube-api-access-gjcds\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.166689 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.166699 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.166706 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.166737 4911 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.166745 4911 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/77d2a08f-5a1a-4847-81a8-a160afadf6aa-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.202038 4911 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.268535 4911 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 
21:43:05.276806 4911 generic.go:334] "Generic (PLEG): container finished" podID="77d2a08f-5a1a-4847-81a8-a160afadf6aa" containerID="611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2" exitCode=0 Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.276873 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.276918 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"77d2a08f-5a1a-4847-81a8-a160afadf6aa","Type":"ContainerDied","Data":"611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2"} Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.277001 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"77d2a08f-5a1a-4847-81a8-a160afadf6aa","Type":"ContainerDied","Data":"e1cce8a028b7709bc7edb400625fbe77faa4a1bf4156bbecdc2cf0bff187f1be"} Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.277030 4911 scope.go:117] "RemoveContainer" containerID="611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.308604 4911 scope.go:117] "RemoveContainer" containerID="a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.317782 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 21:43:05 crc kubenswrapper[4911]: W0929 21:43:05.319326 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb31746fe_e0eb_4ce1_9d20_3abc7b66ebf4.slice/crio-c724c7326902f085a3bc95744d7c5568e50517e9dc7039ecb699c842804bf5f8 WatchSource:0}: Error finding container c724c7326902f085a3bc95744d7c5568e50517e9dc7039ecb699c842804bf5f8: Status 404 returned error can't find the container with id c724c7326902f085a3bc95744d7c5568e50517e9dc7039ecb699c842804bf5f8 Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.342810 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.353073 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.362301 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 21:43:05 crc kubenswrapper[4911]: E0929 21:43:05.362645 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77d2a08f-5a1a-4847-81a8-a160afadf6aa" containerName="glance-log" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.362660 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="77d2a08f-5a1a-4847-81a8-a160afadf6aa" containerName="glance-log" Sep 29 21:43:05 crc kubenswrapper[4911]: E0929 21:43:05.362673 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77d2a08f-5a1a-4847-81a8-a160afadf6aa" containerName="glance-httpd" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.362680 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="77d2a08f-5a1a-4847-81a8-a160afadf6aa" containerName="glance-httpd" Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.362884 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="77d2a08f-5a1a-4847-81a8-a160afadf6aa" 
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.362884 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="77d2a08f-5a1a-4847-81a8-a160afadf6aa" containerName="glance-httpd"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.362908 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="77d2a08f-5a1a-4847-81a8-a160afadf6aa" containerName="glance-log"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.363831 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.367879 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.368042 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.373927 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.374138 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.387188 4911 scope.go:117] "RemoveContainer" containerID="611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2"
Sep 29 21:43:05 crc kubenswrapper[4911]: E0929 21:43:05.392846 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2\": container with ID starting with 611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2 not found: ID does not exist" containerID="611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.392902 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2"} err="failed to get container status \"611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2\": rpc error: code = NotFound desc = could not find container \"611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2\": container with ID starting with 611630bd6f301abb0a92730c9bab0a0a1f05ea2f08d68536c17a12bd207233c2 not found: ID does not exist"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.392937 4911 scope.go:117] "RemoveContainer" containerID="a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f"
Sep 29 21:43:05 crc kubenswrapper[4911]: E0929 21:43:05.395820 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f\": container with ID starting with a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f not found: ID does not exist" containerID="a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f"
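The "DeleteContainer returned error ... NotFound" pairs above are benign: the container was already gone when the kubelet retried the removal, and a NotFound from the runtime means the desired end state already holds. A sketch of that idempotent-delete pattern, assuming the google.golang.org/grpc module is available (the runtimeService interface is hypothetical, not the real CRI client):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// runtimeService is a stand-in for a CRI-style client.
type runtimeService interface {
	RemoveContainer(id string) error
}

// removeIfPresent treats a gRPC NotFound as success: "container gone"
// is exactly the state the caller wanted, so there is nothing to retry.
func removeIfPresent(rt runtimeService, id string) error {
	if err := rt.RemoveContainer(id); err != nil {
		if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
			return nil
		}
		return fmt.Errorf("remove %s: %w", id, err)
	}
	return nil
}

type fakeRT struct{}

func (fakeRT) RemoveContainer(id string) error {
	return status.Errorf(codes.NotFound, "could not find container %q", id)
}

func main() {
	fmt.Println(removeIfPresent(fakeRT{}, "611630bd6f30")) // prints <nil>
}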
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.395864 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f"} err="failed to get container status \"a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f\": rpc error: code = NotFound desc = could not find container \"a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f\": container with ID starting with a7949bce4d8e3792e9e327667dfbc6e575e8c13b5ea06a6dd1dda31aa1560f2f not found: ID does not exist"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.473197 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ae35250-2041-4b17-8829-f0d982384d7e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.473252 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8959\" (UniqueName: \"kubernetes.io/projected/2ae35250-2041-4b17-8829-f0d982384d7e-kube-api-access-g8959\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.473321 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ae35250-2041-4b17-8829-f0d982384d7e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.473341 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ae35250-2041-4b17-8829-f0d982384d7e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.473384 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ae35250-2041-4b17-8829-f0d982384d7e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.473432 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.473473 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ae35250-2041-4b17-8829-f0d982384d7e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.473502 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ae35250-2041-4b17-8829-f0d982384d7e-logs\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.575747 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8959\" (UniqueName: \"kubernetes.io/projected/2ae35250-2041-4b17-8829-f0d982384d7e-kube-api-access-g8959\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.575844 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ae35250-2041-4b17-8829-f0d982384d7e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.576631 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ae35250-2041-4b17-8829-f0d982384d7e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.576679 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ae35250-2041-4b17-8829-f0d982384d7e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.576714 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ae35250-2041-4b17-8829-f0d982384d7e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.576774 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.576823 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ae35250-2041-4b17-8829-f0d982384d7e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.576861 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ae35250-2041-4b17-8829-f0d982384d7e-logs\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.577243 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ae35250-2041-4b17-8829-f0d982384d7e-logs\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.578555 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ae35250-2041-4b17-8829-f0d982384d7e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.581526 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ae35250-2041-4b17-8829-f0d982384d7e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.581625 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.583539 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ae35250-2041-4b17-8829-f0d982384d7e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.585005 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ae35250-2041-4b17-8829-f0d982384d7e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.588836 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ae35250-2041-4b17-8829-f0d982384d7e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.594921 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8959\" (UniqueName: \"kubernetes.io/projected/2ae35250-2041-4b17-8829-f0d982384d7e-kube-api-access-g8959\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.618391 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"2ae35250-2041-4b17-8829-f0d982384d7e\") " pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.718612 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.940825 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jvp47"]
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.965524 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jvp47"]
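The "SyncLoop ADD/UPDATE" lines with source="api" are pod-spec changes arriving over the kubelet's watch on the API server. From outside the node, the closest observable analogue is watching pod objects with client-go; a sketch, where the kubeconfig path and namespace are assumptions (error handling trimmed):

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Build a client from ~/.kube/config (an assumption for this sketch).
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)

	// Watch a single pod in the openstack namespace and print event types.
	w, err := cs.CoreV1().Pods("openstack").Watch(context.Background(),
		metav1.ListOptions{FieldSelector: "metadata.name=glance-default-internal-api-0"})
	if err != nil {
		panic(err)
	}
	for ev := range w.ResultChan() {
		fmt.Println("event:", ev.Type) // ADDED / MODIFIED / DELETED
	}
}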
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.965681 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.969062 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.969247 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-vtqgg"
Sep 29 21:43:05 crc kubenswrapper[4911]: I0929 21:43:05.971198 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.101178 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-scripts\") pod \"nova-cell0-conductor-db-sync-jvp47\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.101598 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jj9l\" (UniqueName: \"kubernetes.io/projected/9a020ecb-1b48-4428-a32f-8593034ab88b-kube-api-access-9jj9l\") pod \"nova-cell0-conductor-db-sync-jvp47\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.101633 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-config-data\") pod \"nova-cell0-conductor-db-sync-jvp47\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.101738 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jvp47\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.203540 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jvp47\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.203657 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-scripts\") pod \"nova-cell0-conductor-db-sync-jvp47\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.203730 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jj9l\" (UniqueName: \"kubernetes.io/projected/9a020ecb-1b48-4428-a32f-8593034ab88b-kube-api-access-9jj9l\") pod \"nova-cell0-conductor-db-sync-jvp47\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.203758 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-config-data\") pod \"nova-cell0-conductor-db-sync-jvp47\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.208373 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-config-data\") pod \"nova-cell0-conductor-db-sync-jvp47\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.209602 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-scripts\") pod \"nova-cell0-conductor-db-sync-jvp47\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.211369 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jvp47\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.230100 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jj9l\" (UniqueName: \"kubernetes.io/projected/9a020ecb-1b48-4428-a32f-8593034ab88b-kube-api-access-9jj9l\") pod \"nova-cell0-conductor-db-sync-jvp47\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.293541 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5cf68f0-dbfb-4256-8ca2-349a4658eb96","Type":"ContainerStarted","Data":"d2f4cf0fa852fb3497d99c8b19fd685f07b597481dd5a6a52a5b9734431764d3"}
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.293578 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5cf68f0-dbfb-4256-8ca2-349a4658eb96","Type":"ContainerStarted","Data":"5990fb2a07e015d3fdffdcbc8a70fe8ce18bb887659b6ab3971f13173f8fc82a"}
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.295765 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4","Type":"ContainerStarted","Data":"35602730275d454d758cedcf5c32ef1883325652332c5c03399875fe7e706898"}
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.295805 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4","Type":"ContainerStarted","Data":"c724c7326902f085a3bc95744d7c5568e50517e9dc7039ecb699c842804bf5f8"}
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.306290 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jvp47"
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.318218 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 29 21:43:06 crc kubenswrapper[4911]: I0929 21:43:06.721484 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77d2a08f-5a1a-4847-81a8-a160afadf6aa" path="/var/lib/kubelet/pods/77d2a08f-5a1a-4847-81a8-a160afadf6aa/volumes"
Sep 29 21:43:07 crc kubenswrapper[4911]: I0929 21:43:07.135264 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jvp47"]
Sep 29 21:43:07 crc kubenswrapper[4911]: W0929 21:43:07.147175 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a020ecb_1b48_4428_a32f_8593034ab88b.slice/crio-9bfe6ae16155344fb7e0663c738141182a25b6f3074d1cf1990418623495d7a8 WatchSource:0}: Error finding container 9bfe6ae16155344fb7e0663c738141182a25b6f3074d1cf1990418623495d7a8: Status 404 returned error can't find the container with id 9bfe6ae16155344fb7e0663c738141182a25b6f3074d1cf1990418623495d7a8
Sep 29 21:43:07 crc kubenswrapper[4911]: I0929 21:43:07.308472 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4","Type":"ContainerStarted","Data":"413cc00a4867b60331262245b6ff783b8cb6c603cdf4cd7c0a197d4bf082f39a"}
Sep 29 21:43:07 crc kubenswrapper[4911]: I0929 21:43:07.310467 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2ae35250-2041-4b17-8829-f0d982384d7e","Type":"ContainerStarted","Data":"faa771e1d6c539d93fe05caf45db273dd01b83a390840871a256685e365b4acd"}
Sep 29 21:43:07 crc kubenswrapper[4911]: I0929 21:43:07.310511 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2ae35250-2041-4b17-8829-f0d982384d7e","Type":"ContainerStarted","Data":"712519fe9d00c7cf3c79740453454eefdc70a573c51e906db5ee4554f6cd2707"}
Sep 29 21:43:07 crc kubenswrapper[4911]: I0929 21:43:07.313682 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5cf68f0-dbfb-4256-8ca2-349a4658eb96","Type":"ContainerStarted","Data":"611dd79306c7ac6d89338191729065825896b3edc765b5be371e82b62e23e36d"}
Sep 29 21:43:07 crc kubenswrapper[4911]: I0929 21:43:07.314785 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jvp47" event={"ID":"9a020ecb-1b48-4428-a32f-8593034ab88b","Type":"ContainerStarted","Data":"9bfe6ae16155344fb7e0663c738141182a25b6f3074d1cf1990418623495d7a8"}
Sep 29 21:43:07 crc kubenswrapper[4911]: I0929 21:43:07.332146 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.332128904 podStartE2EDuration="3.332128904s" podCreationTimestamp="2025-09-29 21:43:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:43:07.324412294 +0000 UTC m=+1065.301524965" watchObservedRunningTime="2025-09-29 21:43:07.332128904 +0000 UTC m=+1065.309241585"
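podStartSLOduration in the tracker lines is, as far as the values here show, the end-to-end startup time with the image-pull window subtracted: where firstStartedPulling/lastFinishedPulling are the zero time ("0001-01-01"), nothing was pulled and SLO equals E2E; for the ceilometer-0 line further down (SLO 2.047 s vs E2E 5.371 s), the difference is exactly the 3.324 s pull window. The arithmetic, as a sketch with made-up offsets matching that pod:

package main

import (
	"fmt"
	"time"
)

// sloDuration subtracts the image-pull window from end-to-end startup.
// Zero pull timestamps mean no pull happened, so SLO == E2E.
func sloDuration(created, running, pullStart, pullEnd time.Time) time.Duration {
	e2e := running.Sub(created)
	if pullStart.IsZero() || pullEnd.IsZero() {
		return e2e
	}
	return e2e - pullEnd.Sub(pullStart)
}

func main() {
	created := time.Date(2025, 9, 29, 21, 43, 4, 0, time.UTC)
	pullStart := created.Add(1387 * time.Millisecond)
	pullEnd := pullStart.Add(3324 * time.Millisecond)
	running := created.Add(5371 * time.Millisecond)
	fmt.Println(sloDuration(created, running, pullStart, pullEnd)) // 2.047s
}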
Sep 29 21:43:08 crc kubenswrapper[4911]: I0929 21:43:08.331908 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5cf68f0-dbfb-4256-8ca2-349a4658eb96","Type":"ContainerStarted","Data":"d8368f2ae422f60b0094f2aa180ad42bc8e34efa936f6599a84ac1644db935a0"}
Sep 29 21:43:08 crc kubenswrapper[4911]: I0929 21:43:08.337573 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2ae35250-2041-4b17-8829-f0d982384d7e","Type":"ContainerStarted","Data":"5fd27b7ae1f21edf41ab8a0d98b76aaaa98e15e57b28b6ccd28be9566556aaba"}
Sep 29 21:43:08 crc kubenswrapper[4911]: I0929 21:43:08.362160 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.362139011 podStartE2EDuration="3.362139011s" podCreationTimestamp="2025-09-29 21:43:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:43:08.3576042 +0000 UTC m=+1066.334716891" watchObservedRunningTime="2025-09-29 21:43:08.362139011 +0000 UTC m=+1066.339251692"
Sep 29 21:43:08 crc kubenswrapper[4911]: I0929 21:43:08.735175 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Sep 29 21:43:09 crc kubenswrapper[4911]: I0929 21:43:09.351225 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5cf68f0-dbfb-4256-8ca2-349a4658eb96","Type":"ContainerStarted","Data":"96cc7c6fc80b61dfd73960cbd85c2e90f537636742c5a527ab2dc0d7bea193e6"}
Sep 29 21:43:09 crc kubenswrapper[4911]: I0929 21:43:09.351471 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 29 21:43:09 crc kubenswrapper[4911]: I0929 21:43:09.371093 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.04656773 podStartE2EDuration="5.371077634s" podCreationTimestamp="2025-09-29 21:43:04 +0000 UTC" firstStartedPulling="2025-09-29 21:43:05.387016794 +0000 UTC m=+1063.364129465" lastFinishedPulling="2025-09-29 21:43:08.711526688 +0000 UTC m=+1066.688639369" observedRunningTime="2025-09-29 21:43:09.366282346 +0000 UTC m=+1067.343395017" watchObservedRunningTime="2025-09-29 21:43:09.371077634 +0000 UTC m=+1067.348190305"
Sep 29 21:43:11 crc kubenswrapper[4911]: I0929 21:43:11.001276 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:43:12 crc kubenswrapper[4911]: I0929 21:43:12.375601 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="sg-core" containerID="cri-o://d8368f2ae422f60b0094f2aa180ad42bc8e34efa936f6599a84ac1644db935a0" gracePeriod=30
Sep 29 21:43:12 crc kubenswrapper[4911]: I0929 21:43:12.375625 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="proxy-httpd" containerID="cri-o://96cc7c6fc80b61dfd73960cbd85c2e90f537636742c5a527ab2dc0d7bea193e6" gracePeriod=30
Sep 29 21:43:12 crc kubenswrapper[4911]: I0929 21:43:12.375563 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="ceilometer-central-agent" containerID="cri-o://d2f4cf0fa852fb3497d99c8b19fd685f07b597481dd5a6a52a5b9734431764d3" gracePeriod=30
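gracePeriod=30 above means each container gets a termination signal and up to 30 seconds to exit before the runtime escalates to a hard kill; sg-core's exitCode=2 just below is consistent with a process that dies on the signal rather than shutting down cleanly. A process-level sketch of the same escalation (not the kubelet's prober/runtime path; "sleep" stands in for a container process):

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// stopWithGrace sends SIGTERM, waits up to grace, then SIGKILLs.
func stopWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	cmd.Process.Signal(syscall.SIGTERM)
	select {
	case err := <-done:
		fmt.Println("exited after SIGTERM:", err)
	case <-time.After(grace):
		cmd.Process.Kill()
		fmt.Println("grace period elapsed, sent SIGKILL:", <-done)
	}
}

func main() {
	cmd := exec.Command("sleep", "300")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	stopWithGrace(cmd, 30*time.Second)
}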
Sep 29 21:43:12 crc kubenswrapper[4911]: I0929 21:43:12.375675 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="ceilometer-notification-agent" containerID="cri-o://611dd79306c7ac6d89338191729065825896b3edc765b5be371e82b62e23e36d" gracePeriod=30
Sep 29 21:43:13 crc kubenswrapper[4911]: I0929 21:43:13.396732 4911 generic.go:334] "Generic (PLEG): container finished" podID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerID="96cc7c6fc80b61dfd73960cbd85c2e90f537636742c5a527ab2dc0d7bea193e6" exitCode=0
Sep 29 21:43:13 crc kubenswrapper[4911]: I0929 21:43:13.397083 4911 generic.go:334] "Generic (PLEG): container finished" podID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerID="d8368f2ae422f60b0094f2aa180ad42bc8e34efa936f6599a84ac1644db935a0" exitCode=2
Sep 29 21:43:13 crc kubenswrapper[4911]: I0929 21:43:13.397094 4911 generic.go:334] "Generic (PLEG): container finished" podID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerID="611dd79306c7ac6d89338191729065825896b3edc765b5be371e82b62e23e36d" exitCode=0
Sep 29 21:43:13 crc kubenswrapper[4911]: I0929 21:43:13.397102 4911 generic.go:334] "Generic (PLEG): container finished" podID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerID="d2f4cf0fa852fb3497d99c8b19fd685f07b597481dd5a6a52a5b9734431764d3" exitCode=0
Sep 29 21:43:13 crc kubenswrapper[4911]: I0929 21:43:13.396825 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5cf68f0-dbfb-4256-8ca2-349a4658eb96","Type":"ContainerDied","Data":"96cc7c6fc80b61dfd73960cbd85c2e90f537636742c5a527ab2dc0d7bea193e6"}
Sep 29 21:43:13 crc kubenswrapper[4911]: I0929 21:43:13.397138 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5cf68f0-dbfb-4256-8ca2-349a4658eb96","Type":"ContainerDied","Data":"d8368f2ae422f60b0094f2aa180ad42bc8e34efa936f6599a84ac1644db935a0"}
Sep 29 21:43:13 crc kubenswrapper[4911]: I0929 21:43:13.397152 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5cf68f0-dbfb-4256-8ca2-349a4658eb96","Type":"ContainerDied","Data":"611dd79306c7ac6d89338191729065825896b3edc765b5be371e82b62e23e36d"}
Sep 29 21:43:13 crc kubenswrapper[4911]: I0929 21:43:13.397164 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5cf68f0-dbfb-4256-8ca2-349a4658eb96","Type":"ContainerDied","Data":"d2f4cf0fa852fb3497d99c8b19fd685f07b597481dd5a6a52a5b9734431764d3"}
Sep 29 21:43:14 crc kubenswrapper[4911]: I0929 21:43:14.776323 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Sep 29 21:43:14 crc kubenswrapper[4911]: I0929 21:43:14.776571 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Sep 29 21:43:14 crc kubenswrapper[4911]: I0929 21:43:14.837142 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Sep 29 21:43:14 crc kubenswrapper[4911]: I0929 21:43:14.851562 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Sep 29 21:43:14 crc kubenswrapper[4911]: I0929 21:43:14.918946 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.090190 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-scripts\") pod \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") "
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.090439 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-combined-ca-bundle\") pod \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") "
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.090542 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-sg-core-conf-yaml\") pod \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") "
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.090651 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-config-data\") pod \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") "
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.091153 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lr26\" (UniqueName: \"kubernetes.io/projected/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-kube-api-access-8lr26\") pod \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") "
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.091282 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-run-httpd\") pod \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") "
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.091427 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-log-httpd\") pod \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\" (UID: \"f5cf68f0-dbfb-4256-8ca2-349a4658eb96\") "
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.091570 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f5cf68f0-dbfb-4256-8ca2-349a4658eb96" (UID: "f5cf68f0-dbfb-4256-8ca2-349a4658eb96"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
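Every kubenswrapper payload above uses the klog header format "I0929 21:43:15.090190 4911 reconciler_common.go:159] message": severity letter (I/W/E/F), MMDD, wall-clock time, PID, and source file:line. A small parser for that header; the regex is a best-effort assumption for these lines, not klog's own grammar:

package main

import (
	"fmt"
	"regexp"
)

// klogHeader matches e.g. "I0929 21:43:15.090190 4911 reconciler_common.go:159] ..."
var klogHeader = regexp.MustCompile(
	`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w.]+:\d+)\] (.*)$`)

func main() {
	line := `I0929 21:43:15.090190 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started"`
	m := klogHeader.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("no match")
		return
	}
	fmt.Printf("severity=%s date=%s time=%s pid=%s src=%s\nmsg=%s\n",
		m[1], m[2], m[3], m[4], m[5], m[6])
}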
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.092024 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f5cf68f0-dbfb-4256-8ca2-349a4658eb96" (UID: "f5cf68f0-dbfb-4256-8ca2-349a4658eb96"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.092223 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.094094 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-scripts" (OuterVolumeSpecName: "scripts") pod "f5cf68f0-dbfb-4256-8ca2-349a4658eb96" (UID: "f5cf68f0-dbfb-4256-8ca2-349a4658eb96"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.097200 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-kube-api-access-8lr26" (OuterVolumeSpecName: "kube-api-access-8lr26") pod "f5cf68f0-dbfb-4256-8ca2-349a4658eb96" (UID: "f5cf68f0-dbfb-4256-8ca2-349a4658eb96"). InnerVolumeSpecName "kube-api-access-8lr26". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.114674 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f5cf68f0-dbfb-4256-8ca2-349a4658eb96" (UID: "f5cf68f0-dbfb-4256-8ca2-349a4658eb96"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.153359 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5cf68f0-dbfb-4256-8ca2-349a4658eb96" (UID: "f5cf68f0-dbfb-4256-8ca2-349a4658eb96"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.172159 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-config-data" (OuterVolumeSpecName: "config-data") pod "f5cf68f0-dbfb-4256-8ca2-349a4658eb96" (UID: "f5cf68f0-dbfb-4256-8ca2-349a4658eb96"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.194745 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.194779 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.194810 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lr26\" (UniqueName: \"kubernetes.io/projected/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-kube-api-access-8lr26\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.194823 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.194834 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.194848 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5cf68f0-dbfb-4256-8ca2-349a4658eb96-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.429165 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5cf68f0-dbfb-4256-8ca2-349a4658eb96","Type":"ContainerDied","Data":"5990fb2a07e015d3fdffdcbc8a70fe8ce18bb887659b6ab3971f13173f8fc82a"}
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.429243 4911 scope.go:117] "RemoveContainer" containerID="96cc7c6fc80b61dfd73960cbd85c2e90f537636742c5a527ab2dc0d7bea193e6"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.429188 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.431539 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jvp47" event={"ID":"9a020ecb-1b48-4428-a32f-8593034ab88b","Type":"ContainerStarted","Data":"7684130f2b8f0aeb2fee8e849b221b5170e0407f57279e8b6356fe5d175e4eb0"}
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.432102 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.432191 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.448715 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-jvp47" podStartSLOduration=2.928356866 podStartE2EDuration="10.448696898s" podCreationTimestamp="2025-09-29 21:43:05 +0000 UTC" firstStartedPulling="2025-09-29 21:43:07.14951176 +0000 UTC m=+1065.126624431" lastFinishedPulling="2025-09-29 21:43:14.669851792 +0000 UTC m=+1072.646964463" observedRunningTime="2025-09-29 21:43:15.448259774 +0000 UTC m=+1073.425372495" watchObservedRunningTime="2025-09-29 21:43:15.448696898 +0000 UTC m=+1073.425809589"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.455617 4911 scope.go:117] "RemoveContainer" containerID="d8368f2ae422f60b0094f2aa180ad42bc8e34efa936f6599a84ac1644db935a0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.484893 4911 scope.go:117] "RemoveContainer" containerID="611dd79306c7ac6d89338191729065825896b3edc765b5be371e82b62e23e36d"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.493038 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.501433 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.530991 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:43:15 crc kubenswrapper[4911]: E0929 21:43:15.531891 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="ceilometer-notification-agent"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.531914 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="ceilometer-notification-agent"
Sep 29 21:43:15 crc kubenswrapper[4911]: E0929 21:43:15.531932 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="sg-core"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.531941 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="sg-core"
Sep 29 21:43:15 crc kubenswrapper[4911]: E0929 21:43:15.531978 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="proxy-httpd"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.531987 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="proxy-httpd"
Sep 29 21:43:15 crc kubenswrapper[4911]: E0929 21:43:15.532021 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="ceilometer-central-agent"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.532030 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="ceilometer-central-agent"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.532928 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="sg-core"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.533034 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="proxy-httpd"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.533064 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="ceilometer-notification-agent"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.533084 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" containerName="ceilometer-central-agent"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.539330 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.543341 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.543645 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.548706 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.552960 4911 scope.go:117] "RemoveContainer" containerID="d2f4cf0fa852fb3497d99c8b19fd685f07b597481dd5a6a52a5b9734431764d3"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.706560 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7dqf\" (UniqueName: \"kubernetes.io/projected/f8460aed-5642-4bd8-8719-2d8a18055a5f-kube-api-access-f7dqf\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.706883 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.706908 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8460aed-5642-4bd8-8719-2d8a18055a5f-run-httpd\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.706932 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-config-data\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.706957 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-scripts\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.706995 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8460aed-5642-4bd8-8719-2d8a18055a5f-log-httpd\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.707008 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.719109 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.719140 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.751059 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.776980 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.808337 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7dqf\" (UniqueName: \"kubernetes.io/projected/f8460aed-5642-4bd8-8719-2d8a18055a5f-kube-api-access-f7dqf\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.808396 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.808440 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8460aed-5642-4bd8-8719-2d8a18055a5f-run-httpd\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.808507 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-config-data\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.808560 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-scripts\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.808653 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8460aed-5642-4bd8-8719-2d8a18055a5f-log-httpd\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.808675 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.812419 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8460aed-5642-4bd8-8719-2d8a18055a5f-run-httpd\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.812903 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8460aed-5642-4bd8-8719-2d8a18055a5f-log-httpd\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.816641 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.816766 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-scripts\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.817400 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.819975 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-config-data\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.830073 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7dqf\" (UniqueName: \"kubernetes.io/projected/f8460aed-5642-4bd8-8719-2d8a18055a5f-kube-api-access-f7dqf\") pod \"ceilometer-0\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:15 crc kubenswrapper[4911]: I0929 21:43:15.856348 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 21:43:16 crc kubenswrapper[4911]: I0929 21:43:16.358747 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:43:16 crc kubenswrapper[4911]: W0929 21:43:16.360529 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8460aed_5642_4bd8_8719_2d8a18055a5f.slice/crio-a5dec418f5073e6659490e11e531ca05d0f54fc02ffaa64f4b7ea9229c09a192 WatchSource:0}: Error finding container a5dec418f5073e6659490e11e531ca05d0f54fc02ffaa64f4b7ea9229c09a192: Status 404 returned error can't find the container with id a5dec418f5073e6659490e11e531ca05d0f54fc02ffaa64f4b7ea9229c09a192
Sep 29 21:43:16 crc kubenswrapper[4911]: I0929 21:43:16.445897 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8460aed-5642-4bd8-8719-2d8a18055a5f","Type":"ContainerStarted","Data":"a5dec418f5073e6659490e11e531ca05d0f54fc02ffaa64f4b7ea9229c09a192"}
Sep 29 21:43:16 crc kubenswrapper[4911]: I0929 21:43:16.446509 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:16 crc kubenswrapper[4911]: I0929 21:43:16.446546 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:16 crc kubenswrapper[4911]: I0929 21:43:16.710348 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5cf68f0-dbfb-4256-8ca2-349a4658eb96" path="/var/lib/kubelet/pods/f5cf68f0-dbfb-4256-8ca2-349a4658eb96/volumes"
Sep 29 21:43:17 crc kubenswrapper[4911]: I0929 21:43:17.285513 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Sep 29 21:43:17 crc kubenswrapper[4911]: I0929 21:43:17.290229 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Sep 29 21:43:17 crc kubenswrapper[4911]: I0929 21:43:17.459737 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8460aed-5642-4bd8-8719-2d8a18055a5f","Type":"ContainerStarted","Data":"94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5"}
Sep 29 21:43:18 crc kubenswrapper[4911]: I0929 21:43:18.364374 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:18 crc kubenswrapper[4911]: I0929 21:43:18.380672 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Sep 29 21:43:18 crc kubenswrapper[4911]: I0929 21:43:18.470745 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8460aed-5642-4bd8-8719-2d8a18055a5f","Type":"ContainerStarted","Data":"e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d"}
Sep 29 21:43:18 crc kubenswrapper[4911]: I0929 21:43:18.470814 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8460aed-5642-4bd8-8719-2d8a18055a5f","Type":"ContainerStarted","Data":"2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a"}
Sep 29 21:43:20 crc kubenswrapper[4911]: I0929 21:43:20.502451 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8460aed-5642-4bd8-8719-2d8a18055a5f","Type":"ContainerStarted","Data":"dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348"}
Sep 29 21:43:20 crc kubenswrapper[4911]: I0929 21:43:20.503130 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 29 21:43:20 crc kubenswrapper[4911]: I0929 21:43:20.527130 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.433236321 podStartE2EDuration="5.527103781s" podCreationTimestamp="2025-09-29 21:43:15 +0000 UTC" firstStartedPulling="2025-09-29 21:43:16.363046028 +0000 UTC m=+1074.340158699" lastFinishedPulling="2025-09-29 21:43:19.456913478 +0000 UTC m=+1077.434026159" observedRunningTime="2025-09-29 21:43:20.523273362 +0000 UTC m=+1078.500386133" watchObservedRunningTime="2025-09-29 21:43:20.527103781 +0000 UTC m=+1078.504216492"
Sep 29 21:43:24 crc kubenswrapper[4911]: I0929 21:43:24.570830 4911 generic.go:334] "Generic (PLEG): container finished" podID="9a020ecb-1b48-4428-a32f-8593034ab88b" containerID="7684130f2b8f0aeb2fee8e849b221b5170e0407f57279e8b6356fe5d175e4eb0" exitCode=0
Sep 29 21:43:24 crc kubenswrapper[4911]: I0929 21:43:24.570883 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jvp47" event={"ID":"9a020ecb-1b48-4428-a32f-8593034ab88b","Type":"ContainerDied","Data":"7684130f2b8f0aeb2fee8e849b221b5170e0407f57279e8b6356fe5d175e4eb0"}
Sep 29 21:43:25 crc kubenswrapper[4911]: I0929 21:43:25.211621 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 21:43:25 crc kubenswrapper[4911]: I0929 21:43:25.211709 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
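The liveness failure just above is a plain HTTP GET that got connection-refused on 127.0.0.1:8798. A sketch of the same kind of probe loop; the endpoint and period are taken from the log, but the loop itself is illustrative, not the kubelet's prober:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe does one HTTP check; any transport error or non-2xx status is a
// failure, matching the "connect: connection refused" outcome in the log.
func probe(url string) error {
	c := http.Client{Timeout: time.Second}
	resp, err := c.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	for i := 0; i < 3; i++ {
		if err := probe("http://127.0.0.1:8798/health"); err != nil {
			fmt.Println("Probe failed:", err)
		} else {
			fmt.Println("probe ok")
		}
		time.Sleep(10 * time.Second) // periodSeconds analogue
	}
}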
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jvp47" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.047456 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-combined-ca-bundle\") pod \"9a020ecb-1b48-4428-a32f-8593034ab88b\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.047592 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jj9l\" (UniqueName: \"kubernetes.io/projected/9a020ecb-1b48-4428-a32f-8593034ab88b-kube-api-access-9jj9l\") pod \"9a020ecb-1b48-4428-a32f-8593034ab88b\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.048397 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-scripts\") pod \"9a020ecb-1b48-4428-a32f-8593034ab88b\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.048504 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-config-data\") pod \"9a020ecb-1b48-4428-a32f-8593034ab88b\" (UID: \"9a020ecb-1b48-4428-a32f-8593034ab88b\") " Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.055487 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-scripts" (OuterVolumeSpecName: "scripts") pod "9a020ecb-1b48-4428-a32f-8593034ab88b" (UID: "9a020ecb-1b48-4428-a32f-8593034ab88b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.056412 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a020ecb-1b48-4428-a32f-8593034ab88b-kube-api-access-9jj9l" (OuterVolumeSpecName: "kube-api-access-9jj9l") pod "9a020ecb-1b48-4428-a32f-8593034ab88b" (UID: "9a020ecb-1b48-4428-a32f-8593034ab88b"). InnerVolumeSpecName "kube-api-access-9jj9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.080331 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a020ecb-1b48-4428-a32f-8593034ab88b" (UID: "9a020ecb-1b48-4428-a32f-8593034ab88b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.086783 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-config-data" (OuterVolumeSpecName: "config-data") pod "9a020ecb-1b48-4428-a32f-8593034ab88b" (UID: "9a020ecb-1b48-4428-a32f-8593034ab88b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.151280 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jj9l\" (UniqueName: \"kubernetes.io/projected/9a020ecb-1b48-4428-a32f-8593034ab88b-kube-api-access-9jj9l\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.151319 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.151333 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.151347 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a020ecb-1b48-4428-a32f-8593034ab88b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.598445 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jvp47" event={"ID":"9a020ecb-1b48-4428-a32f-8593034ab88b","Type":"ContainerDied","Data":"9bfe6ae16155344fb7e0663c738141182a25b6f3074d1cf1990418623495d7a8"} Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.598513 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jvp47" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.598515 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9bfe6ae16155344fb7e0663c738141182a25b6f3074d1cf1990418623495d7a8" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.821751 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 21:43:26 crc kubenswrapper[4911]: E0929 21:43:26.822737 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a020ecb-1b48-4428-a32f-8593034ab88b" containerName="nova-cell0-conductor-db-sync" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.822757 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a020ecb-1b48-4428-a32f-8593034ab88b" containerName="nova-cell0-conductor-db-sync" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.823107 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a020ecb-1b48-4428-a32f-8593034ab88b" containerName="nova-cell0-conductor-db-sync" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.823838 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.828012 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.828514 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-vtqgg" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.856596 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.971818 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43d54eea-b9ec-4034-9c29-e9426e27f65b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"43d54eea-b9ec-4034-9c29-e9426e27f65b\") " pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.971942 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6mc5\" (UniqueName: \"kubernetes.io/projected/43d54eea-b9ec-4034-9c29-e9426e27f65b-kube-api-access-s6mc5\") pod \"nova-cell0-conductor-0\" (UID: \"43d54eea-b9ec-4034-9c29-e9426e27f65b\") " pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:26 crc kubenswrapper[4911]: I0929 21:43:26.972105 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d54eea-b9ec-4034-9c29-e9426e27f65b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"43d54eea-b9ec-4034-9c29-e9426e27f65b\") " pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:27 crc kubenswrapper[4911]: I0929 21:43:27.073553 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d54eea-b9ec-4034-9c29-e9426e27f65b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"43d54eea-b9ec-4034-9c29-e9426e27f65b\") " pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:27 crc kubenswrapper[4911]: I0929 21:43:27.073634 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43d54eea-b9ec-4034-9c29-e9426e27f65b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"43d54eea-b9ec-4034-9c29-e9426e27f65b\") " pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:27 crc kubenswrapper[4911]: I0929 21:43:27.073695 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6mc5\" (UniqueName: \"kubernetes.io/projected/43d54eea-b9ec-4034-9c29-e9426e27f65b-kube-api-access-s6mc5\") pod \"nova-cell0-conductor-0\" (UID: \"43d54eea-b9ec-4034-9c29-e9426e27f65b\") " pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:27 crc kubenswrapper[4911]: I0929 21:43:27.082708 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43d54eea-b9ec-4034-9c29-e9426e27f65b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"43d54eea-b9ec-4034-9c29-e9426e27f65b\") " pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:27 crc kubenswrapper[4911]: I0929 21:43:27.088353 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d54eea-b9ec-4034-9c29-e9426e27f65b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"43d54eea-b9ec-4034-9c29-e9426e27f65b\") " pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:27 crc kubenswrapper[4911]: I0929 21:43:27.096236 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6mc5\" (UniqueName: \"kubernetes.io/projected/43d54eea-b9ec-4034-9c29-e9426e27f65b-kube-api-access-s6mc5\") pod \"nova-cell0-conductor-0\" (UID: \"43d54eea-b9ec-4034-9c29-e9426e27f65b\") " pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:27 crc kubenswrapper[4911]: I0929 21:43:27.161023 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:27 crc kubenswrapper[4911]: I0929 21:43:27.632042 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 21:43:27 crc kubenswrapper[4911]: W0929 21:43:27.638287 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43d54eea_b9ec_4034_9c29_e9426e27f65b.slice/crio-47008095270d09a1810682b84702d926d8b1c5b2c62e36179f37d974f591942d WatchSource:0}: Error finding container 47008095270d09a1810682b84702d926d8b1c5b2c62e36179f37d974f591942d: Status 404 returned error can't find the container with id 47008095270d09a1810682b84702d926d8b1c5b2c62e36179f37d974f591942d Sep 29 21:43:28 crc kubenswrapper[4911]: I0929 21:43:28.628361 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"43d54eea-b9ec-4034-9c29-e9426e27f65b","Type":"ContainerStarted","Data":"8af7cfcad47c883552f7c69d525c483a7091488d14c7433adbf0dc7c0f542b18"} Sep 29 21:43:28 crc kubenswrapper[4911]: I0929 21:43:28.628706 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:28 crc kubenswrapper[4911]: I0929 21:43:28.628728 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"43d54eea-b9ec-4034-9c29-e9426e27f65b","Type":"ContainerStarted","Data":"47008095270d09a1810682b84702d926d8b1c5b2c62e36179f37d974f591942d"} Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.210236 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.233781 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=11.233756525 podStartE2EDuration="11.233756525s" podCreationTimestamp="2025-09-29 21:43:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:43:28.658384081 +0000 UTC m=+1086.635496812" watchObservedRunningTime="2025-09-29 21:43:37.233756525 +0000 UTC m=+1095.210869256" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.723164 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-49f9z"] Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.726710 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.728337 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.729102 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.735590 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-49f9z"] Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.801395 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-49f9z\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") " pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.801664 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrdn2\" (UniqueName: \"kubernetes.io/projected/7c8f9423-562b-49fa-946d-10d52101e44c-kube-api-access-vrdn2\") pod \"nova-cell0-cell-mapping-49f9z\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") " pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.801749 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-config-data\") pod \"nova-cell0-cell-mapping-49f9z\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") " pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.801873 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-scripts\") pod \"nova-cell0-cell-mapping-49f9z\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") " pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.903818 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-49f9z\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") " pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.903862 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrdn2\" (UniqueName: \"kubernetes.io/projected/7c8f9423-562b-49fa-946d-10d52101e44c-kube-api-access-vrdn2\") pod \"nova-cell0-cell-mapping-49f9z\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") " pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.903880 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-config-data\") pod \"nova-cell0-cell-mapping-49f9z\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") " pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.903919 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-scripts\") pod \"nova-cell0-cell-mapping-49f9z\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") " pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.912938 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-scripts\") pod \"nova-cell0-cell-mapping-49f9z\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") " pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.913672 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-49f9z\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") " pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.913786 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-config-data\") pod \"nova-cell0-cell-mapping-49f9z\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") " pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:37 crc kubenswrapper[4911]: I0929 21:43:37.983460 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrdn2\" (UniqueName: \"kubernetes.io/projected/7c8f9423-562b-49fa-946d-10d52101e44c-kube-api-access-vrdn2\") pod \"nova-cell0-cell-mapping-49f9z\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") " pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.027942 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.029017 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.031228 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.045310 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.051557 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-49f9z" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.107841 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz6xj\" (UniqueName: \"kubernetes.io/projected/72f12417-f4fb-494a-abca-37933bdb860d-kube-api-access-sz6xj\") pod \"nova-scheduler-0\" (UID: \"72f12417-f4fb-494a-abca-37933bdb860d\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.107965 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72f12417-f4fb-494a-abca-37933bdb860d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"72f12417-f4fb-494a-abca-37933bdb860d\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.108025 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72f12417-f4fb-494a-abca-37933bdb860d-config-data\") pod \"nova-scheduler-0\" (UID: \"72f12417-f4fb-494a-abca-37933bdb860d\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.124509 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.134943 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.143911 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.173012 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.174674 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.178221 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.204621 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.210084 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d18437e-1f1c-413d-8308-5ffe566350ee-logs\") pod \"nova-metadata-0\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") " pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.210173 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72f12417-f4fb-494a-abca-37933bdb860d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"72f12417-f4fb-494a-abca-37933bdb860d\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.210251 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72f12417-f4fb-494a-abca-37933bdb860d-config-data\") pod \"nova-scheduler-0\" (UID: \"72f12417-f4fb-494a-abca-37933bdb860d\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.210321 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz6xj\" (UniqueName: \"kubernetes.io/projected/72f12417-f4fb-494a-abca-37933bdb860d-kube-api-access-sz6xj\") pod \"nova-scheduler-0\" (UID: \"72f12417-f4fb-494a-abca-37933bdb860d\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.210351 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d18437e-1f1c-413d-8308-5ffe566350ee-config-data\") pod \"nova-metadata-0\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") " pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.210454 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52s86\" (UniqueName: \"kubernetes.io/projected/5d18437e-1f1c-413d-8308-5ffe566350ee-kube-api-access-52s86\") pod \"nova-metadata-0\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") " pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.210516 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d18437e-1f1c-413d-8308-5ffe566350ee-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") " pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.226451 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72f12417-f4fb-494a-abca-37933bdb860d-config-data\") pod \"nova-scheduler-0\" (UID: \"72f12417-f4fb-494a-abca-37933bdb860d\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.235161 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.240464 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72f12417-f4fb-494a-abca-37933bdb860d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"72f12417-f4fb-494a-abca-37933bdb860d\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.255475 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz6xj\" (UniqueName: \"kubernetes.io/projected/72f12417-f4fb-494a-abca-37933bdb860d-kube-api-access-sz6xj\") pod \"nova-scheduler-0\" (UID: \"72f12417-f4fb-494a-abca-37933bdb860d\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.300030 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-2srf4"] Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.313633 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d18437e-1f1c-413d-8308-5ffe566350ee-config-data\") pod \"nova-metadata-0\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") " pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.313682 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brr78\" (UniqueName: \"kubernetes.io/projected/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-kube-api-access-brr78\") pod \"nova-api-0\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.313715 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52s86\" (UniqueName: \"kubernetes.io/projected/5d18437e-1f1c-413d-8308-5ffe566350ee-kube-api-access-52s86\") pod \"nova-metadata-0\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") " pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.313746 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d18437e-1f1c-413d-8308-5ffe566350ee-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") " pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.313786 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d18437e-1f1c-413d-8308-5ffe566350ee-logs\") pod \"nova-metadata-0\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") " pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.313822 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-logs\") pod \"nova-api-0\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.313871 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-config-data\") pod \"nova-api-0\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.313905 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.314010 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.322407 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d18437e-1f1c-413d-8308-5ffe566350ee-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") " pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.326215 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d18437e-1f1c-413d-8308-5ffe566350ee-logs\") pod \"nova-metadata-0\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") " pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.336160 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d18437e-1f1c-413d-8308-5ffe566350ee-config-data\") pod \"nova-metadata-0\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") " pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.345521 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.347663 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-2srf4"] Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.352585 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52s86\" (UniqueName: \"kubernetes.io/projected/5d18437e-1f1c-413d-8308-5ffe566350ee-kube-api-access-52s86\") pod \"nova-metadata-0\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") " pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.388733 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.389985 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.398969 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.400376 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.418098 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjvjx\" (UniqueName: \"kubernetes.io/projected/3046cee5-d66c-4a66-bed7-2f4a36df4113-kube-api-access-bjvjx\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.418137 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-config-data\") pod \"nova-api-0\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.418633 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.418665 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.418685 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.418701 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.418760 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brr78\" (UniqueName: \"kubernetes.io/projected/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-kube-api-access-brr78\") pod \"nova-api-0\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.418820 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-config\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc 
kubenswrapper[4911]: I0929 21:43:38.418837 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.418890 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-logs\") pod \"nova-api-0\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.419191 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-logs\") pod \"nova-api-0\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.429008 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-config-data\") pod \"nova-api-0\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.434607 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.440734 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brr78\" (UniqueName: \"kubernetes.io/projected/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-kube-api-access-brr78\") pod \"nova-api-0\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.520750 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5189e725-065a-4a70-8e07-4b19758add3c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5189e725-065a-4a70-8e07-4b19758add3c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.521147 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-config\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.521164 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.521221 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5189e725-065a-4a70-8e07-4b19758add3c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"5189e725-065a-4a70-8e07-4b19758add3c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.521274 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjvjx\" (UniqueName: \"kubernetes.io/projected/3046cee5-d66c-4a66-bed7-2f4a36df4113-kube-api-access-bjvjx\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.521315 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.521339 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.521359 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.521434 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p62s6\" (UniqueName: \"kubernetes.io/projected/5189e725-065a-4a70-8e07-4b19758add3c-kube-api-access-p62s6\") pod \"nova-cell1-novncproxy-0\" (UID: \"5189e725-065a-4a70-8e07-4b19758add3c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.522092 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-config\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.522152 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.522480 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.522768 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " 
pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.523070 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.538768 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.547925 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjvjx\" (UniqueName: \"kubernetes.io/projected/3046cee5-d66c-4a66-bed7-2f4a36df4113-kube-api-access-bjvjx\") pod \"dnsmasq-dns-845d6d6f59-2srf4\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.567904 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.622868 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p62s6\" (UniqueName: \"kubernetes.io/projected/5189e725-065a-4a70-8e07-4b19758add3c-kube-api-access-p62s6\") pod \"nova-cell1-novncproxy-0\" (UID: \"5189e725-065a-4a70-8e07-4b19758add3c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.622915 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5189e725-065a-4a70-8e07-4b19758add3c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5189e725-065a-4a70-8e07-4b19758add3c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.622971 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5189e725-065a-4a70-8e07-4b19758add3c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5189e725-065a-4a70-8e07-4b19758add3c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.627136 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5189e725-065a-4a70-8e07-4b19758add3c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5189e725-065a-4a70-8e07-4b19758add3c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.627222 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5189e725-065a-4a70-8e07-4b19758add3c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5189e725-065a-4a70-8e07-4b19758add3c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.640563 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p62s6\" (UniqueName: \"kubernetes.io/projected/5189e725-065a-4a70-8e07-4b19758add3c-kube-api-access-p62s6\") pod \"nova-cell1-novncproxy-0\" (UID: \"5189e725-065a-4a70-8e07-4b19758add3c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.651537 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:43:38 crc kubenswrapper[4911]: W0929 21:43:38.727225 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c8f9423_562b_49fa_946d_10d52101e44c.slice/crio-091a3d21c3df913fccbc4a36cb3ce428ff231a4e395dcad58fbfe273ab2c062d WatchSource:0}: Error finding container 091a3d21c3df913fccbc4a36cb3ce428ff231a4e395dcad58fbfe273ab2c062d: Status 404 returned error can't find the container with id 091a3d21c3df913fccbc4a36cb3ce428ff231a4e395dcad58fbfe273ab2c062d Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.731033 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-49f9z"] Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.739716 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.752050 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-49f9z" event={"ID":"7c8f9423-562b-49fa-946d-10d52101e44c","Type":"ContainerStarted","Data":"091a3d21c3df913fccbc4a36cb3ce428ff231a4e395dcad58fbfe273ab2c062d"} Sep 29 21:43:38 crc kubenswrapper[4911]: I0929 21:43:38.882141 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:43:38 crc kubenswrapper[4911]: W0929 21:43:38.923989 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72f12417_f4fb_494a_abca_37933bdb860d.slice/crio-a67bc82218d6c32ddb1488ebadaaceca10153facace219489971e5489c146489 WatchSource:0}: Error finding container a67bc82218d6c32ddb1488ebadaaceca10153facace219489971e5489c146489: Status 404 returned error can't find the container with id a67bc82218d6c32ddb1488ebadaaceca10153facace219489971e5489c146489 Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.009827 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xpzwc"] Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.011381 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.014246 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.014480 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 29 21:43:39 crc kubenswrapper[4911]: W0929 21:43:39.029777 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01ef4ed4_a76d_4896_9c3e_e62abb21d2cb.slice/crio-0226b37e9a1ad402d36b0204f6b1274cfe7b841d5bc0324a0be3b140f6ba1e33 WatchSource:0}: Error finding container 0226b37e9a1ad402d36b0204f6b1274cfe7b841d5bc0324a0be3b140f6ba1e33: Status 404 returned error can't find the container with id 0226b37e9a1ad402d36b0204f6b1274cfe7b841d5bc0324a0be3b140f6ba1e33 Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.039261 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.073112 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xpzwc"] Sep 29 21:43:39 crc kubenswrapper[4911]: W0929 21:43:39.074457 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d18437e_1f1c_413d_8308_5ffe566350ee.slice/crio-bc2d5188e4f0922f8b9fa3b8f8e6e218c03aa47b210943cf230711a110df43e8 WatchSource:0}: Error finding container bc2d5188e4f0922f8b9fa3b8f8e6e218c03aa47b210943cf230711a110df43e8: Status 404 returned error can't find the container with id bc2d5188e4f0922f8b9fa3b8f8e6e218c03aa47b210943cf230711a110df43e8 Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.127370 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.135919 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-config-data\") pod \"nova-cell1-conductor-db-sync-xpzwc\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") " pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.135982 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-xpzwc\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") " pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.136019 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fclhz\" (UniqueName: \"kubernetes.io/projected/823859df-f000-4749-acfe-eb9168574272-kube-api-access-fclhz\") pod \"nova-cell1-conductor-db-sync-xpzwc\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") " pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.136059 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-scripts\") pod \"nova-cell1-conductor-db-sync-xpzwc\" (UID: 
\"823859df-f000-4749-acfe-eb9168574272\") " pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.237985 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-scripts\") pod \"nova-cell1-conductor-db-sync-xpzwc\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") " pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.238104 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-config-data\") pod \"nova-cell1-conductor-db-sync-xpzwc\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") " pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.238144 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-xpzwc\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") " pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.238186 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fclhz\" (UniqueName: \"kubernetes.io/projected/823859df-f000-4749-acfe-eb9168574272-kube-api-access-fclhz\") pod \"nova-cell1-conductor-db-sync-xpzwc\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") " pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.244726 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-xpzwc\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") " pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.247390 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-scripts\") pod \"nova-cell1-conductor-db-sync-xpzwc\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") " pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.247685 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-config-data\") pod \"nova-cell1-conductor-db-sync-xpzwc\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") " pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.256299 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fclhz\" (UniqueName: \"kubernetes.io/projected/823859df-f000-4749-acfe-eb9168574272-kube-api-access-fclhz\") pod \"nova-cell1-conductor-db-sync-xpzwc\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") " pod="openstack/nova-cell1-conductor-db-sync-xpzwc" Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.288433 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-2srf4"] Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.445419 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xpzwc"
Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.482507 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.813955 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb","Type":"ContainerStarted","Data":"0226b37e9a1ad402d36b0204f6b1274cfe7b841d5bc0324a0be3b140f6ba1e33"}
Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.824732 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5189e725-065a-4a70-8e07-4b19758add3c","Type":"ContainerStarted","Data":"9c5f8988bfb591bd75bbe5ac8f886839ddb78a59b1836b626d0c2651ff61ec7a"}
Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.830144 4911 generic.go:334] "Generic (PLEG): container finished" podID="3046cee5-d66c-4a66-bed7-2f4a36df4113" containerID="263923d068d9cf0997a5cbbb0415219e28ee27d137c5bc4e826fd3c3d361a89b" exitCode=0
Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.830248 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" event={"ID":"3046cee5-d66c-4a66-bed7-2f4a36df4113","Type":"ContainerDied","Data":"263923d068d9cf0997a5cbbb0415219e28ee27d137c5bc4e826fd3c3d361a89b"}
Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.830274 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" event={"ID":"3046cee5-d66c-4a66-bed7-2f4a36df4113","Type":"ContainerStarted","Data":"0af6f1781c8ae1bc7264ff5db1ce6b25a27d0c6617ec83387794708d00bc722e"}
Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.833218 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5d18437e-1f1c-413d-8308-5ffe566350ee","Type":"ContainerStarted","Data":"bc2d5188e4f0922f8b9fa3b8f8e6e218c03aa47b210943cf230711a110df43e8"}
Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.839632 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-49f9z" event={"ID":"7c8f9423-562b-49fa-946d-10d52101e44c","Type":"ContainerStarted","Data":"8f60832b0ac2ad7edac27180158f9865b7ee0aa997d04e16793bce56d5f08973"}
Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.848848 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"72f12417-f4fb-494a-abca-37933bdb860d","Type":"ContainerStarted","Data":"a67bc82218d6c32ddb1488ebadaaceca10153facace219489971e5489c146489"}
Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.855294 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xpzwc"]
Sep 29 21:43:39 crc kubenswrapper[4911]: I0929 21:43:39.876124 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-49f9z" podStartSLOduration=2.876109121 podStartE2EDuration="2.876109121s" podCreationTimestamp="2025-09-29 21:43:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:43:39.87253484 +0000 UTC m=+1097.849647511" watchObservedRunningTime="2025-09-29 21:43:39.876109121 +0000 UTC m=+1097.853221792"
Sep 29 21:43:40 crc kubenswrapper[4911]: I0929 21:43:40.859627 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xpzwc" event={"ID":"823859df-f000-4749-acfe-eb9168574272","Type":"ContainerStarted","Data":"755ed86710fd07765b52da1f63606bf3a3b4b7887b5407d7caa4a7a92f3fe11f"}
Sep 29 21:43:40 crc kubenswrapper[4911]: I0929 21:43:40.860099 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xpzwc" event={"ID":"823859df-f000-4749-acfe-eb9168574272","Type":"ContainerStarted","Data":"13bae527916c96b772841a042be3a5ed3ee1c03025f3113f0c1761717e0fe4a2"}
Sep 29 21:43:40 crc kubenswrapper[4911]: I0929 21:43:40.870451 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" event={"ID":"3046cee5-d66c-4a66-bed7-2f4a36df4113","Type":"ContainerStarted","Data":"c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f"}
Sep 29 21:43:40 crc kubenswrapper[4911]: I0929 21:43:40.870498 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-2srf4"
Sep 29 21:43:40 crc kubenswrapper[4911]: I0929 21:43:40.887196 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-xpzwc" podStartSLOduration=2.88717688 podStartE2EDuration="2.88717688s" podCreationTimestamp="2025-09-29 21:43:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:43:40.879122421 +0000 UTC m=+1098.856235102" watchObservedRunningTime="2025-09-29 21:43:40.88717688 +0000 UTC m=+1098.864289571"
Sep 29 21:43:40 crc kubenswrapper[4911]: I0929 21:43:40.926244 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" podStartSLOduration=2.926224992 podStartE2EDuration="2.926224992s" podCreationTimestamp="2025-09-29 21:43:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:43:40.919288587 +0000 UTC m=+1098.896401268" watchObservedRunningTime="2025-09-29 21:43:40.926224992 +0000 UTC m=+1098.903337663"
Sep 29 21:43:41 crc kubenswrapper[4911]: I0929 21:43:41.720952 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 21:43:41 crc kubenswrapper[4911]: I0929 21:43:41.729822 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.897107 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"72f12417-f4fb-494a-abca-37933bdb860d","Type":"ContainerStarted","Data":"abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17"}
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.901027 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb","Type":"ContainerStarted","Data":"090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072"}
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.901070 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb","Type":"ContainerStarted","Data":"c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630"}
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.903149 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5189e725-065a-4a70-8e07-4b19758add3c","Type":"ContainerStarted","Data":"84c9b8aa455c4b9d4dec7ff584f98682a4936de9f034bc182a3098fe1e82adac"}
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.903272 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="5189e725-065a-4a70-8e07-4b19758add3c" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://84c9b8aa455c4b9d4dec7ff584f98682a4936de9f034bc182a3098fe1e82adac" gracePeriod=30
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.908578 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5d18437e-1f1c-413d-8308-5ffe566350ee","Type":"ContainerStarted","Data":"ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710"}
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.908643 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5d18437e-1f1c-413d-8308-5ffe566350ee","Type":"ContainerStarted","Data":"64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f"}
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.908748 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5d18437e-1f1c-413d-8308-5ffe566350ee" containerName="nova-metadata-log" containerID="cri-o://64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f" gracePeriod=30
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.908784 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5d18437e-1f1c-413d-8308-5ffe566350ee" containerName="nova-metadata-metadata" containerID="cri-o://ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710" gracePeriod=30
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.958158 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.444373946 podStartE2EDuration="5.958137139s" podCreationTimestamp="2025-09-29 21:43:38 +0000 UTC" firstStartedPulling="2025-09-29 21:43:39.083420755 +0000 UTC m=+1097.060533426" lastFinishedPulling="2025-09-29 21:43:42.597183948 +0000 UTC m=+1100.574296619" observedRunningTime="2025-09-29 21:43:43.946533029 +0000 UTC m=+1101.923645720" watchObservedRunningTime="2025-09-29 21:43:43.958137139 +0000 UTC m=+1101.935249820"
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.958912 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.292508355 podStartE2EDuration="5.958905433s" podCreationTimestamp="2025-09-29 21:43:38 +0000 UTC" firstStartedPulling="2025-09-29 21:43:38.930261084 +0000 UTC m=+1096.907373755" lastFinishedPulling="2025-09-29 21:43:42.596658152 +0000 UTC m=+1100.573770833" observedRunningTime="2025-09-29 21:43:43.928452068 +0000 UTC m=+1101.905564749" watchObservedRunningTime="2025-09-29 21:43:43.958905433 +0000 UTC m=+1101.936018124"
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.981178 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.421424405 podStartE2EDuration="5.981156953s" podCreationTimestamp="2025-09-29 21:43:38 +0000 UTC" firstStartedPulling="2025-09-29 21:43:39.041202556 +0000 UTC m=+1097.018315227" lastFinishedPulling="2025-09-29 21:43:42.600935104 +0000 UTC m=+1100.578047775" observedRunningTime="2025-09-29 21:43:43.973549477 +0000 UTC m=+1101.950662158" watchObservedRunningTime="2025-09-29 21:43:43.981156953 +0000 UTC m=+1101.958269634"
Sep 29 21:43:43 crc kubenswrapper[4911]: I0929 21:43:43.997725 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.901419331 podStartE2EDuration="5.997706276s" podCreationTimestamp="2025-09-29 21:43:38 +0000 UTC" firstStartedPulling="2025-09-29 21:43:39.500990246 +0000 UTC m=+1097.478102917" lastFinishedPulling="2025-09-29 21:43:42.597277171 +0000 UTC m=+1100.574389862" observedRunningTime="2025-09-29 21:43:43.993157146 +0000 UTC m=+1101.970269837" watchObservedRunningTime="2025-09-29 21:43:43.997706276 +0000 UTC m=+1101.974818947"
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.488365 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.646196 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d18437e-1f1c-413d-8308-5ffe566350ee-logs\") pod \"5d18437e-1f1c-413d-8308-5ffe566350ee\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") "
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.646269 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d18437e-1f1c-413d-8308-5ffe566350ee-config-data\") pod \"5d18437e-1f1c-413d-8308-5ffe566350ee\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") "
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.646302 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d18437e-1f1c-413d-8308-5ffe566350ee-combined-ca-bundle\") pod \"5d18437e-1f1c-413d-8308-5ffe566350ee\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") "
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.646338 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52s86\" (UniqueName: \"kubernetes.io/projected/5d18437e-1f1c-413d-8308-5ffe566350ee-kube-api-access-52s86\") pod \"5d18437e-1f1c-413d-8308-5ffe566350ee\" (UID: \"5d18437e-1f1c-413d-8308-5ffe566350ee\") "
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.647409 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d18437e-1f1c-413d-8308-5ffe566350ee-logs" (OuterVolumeSpecName: "logs") pod "5d18437e-1f1c-413d-8308-5ffe566350ee" (UID: "5d18437e-1f1c-413d-8308-5ffe566350ee"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.652638 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d18437e-1f1c-413d-8308-5ffe566350ee-kube-api-access-52s86" (OuterVolumeSpecName: "kube-api-access-52s86") pod "5d18437e-1f1c-413d-8308-5ffe566350ee" (UID: "5d18437e-1f1c-413d-8308-5ffe566350ee"). InnerVolumeSpecName "kube-api-access-52s86". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.681816 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d18437e-1f1c-413d-8308-5ffe566350ee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d18437e-1f1c-413d-8308-5ffe566350ee" (UID: "5d18437e-1f1c-413d-8308-5ffe566350ee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.701253 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d18437e-1f1c-413d-8308-5ffe566350ee-config-data" (OuterVolumeSpecName: "config-data") pod "5d18437e-1f1c-413d-8308-5ffe566350ee" (UID: "5d18437e-1f1c-413d-8308-5ffe566350ee"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.748717 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d18437e-1f1c-413d-8308-5ffe566350ee-logs\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.748753 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d18437e-1f1c-413d-8308-5ffe566350ee-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.748769 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d18437e-1f1c-413d-8308-5ffe566350ee-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.748785 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52s86\" (UniqueName: \"kubernetes.io/projected/5d18437e-1f1c-413d-8308-5ffe566350ee-kube-api-access-52s86\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.936969 4911 generic.go:334] "Generic (PLEG): container finished" podID="5d18437e-1f1c-413d-8308-5ffe566350ee" containerID="ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710" exitCode=0
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.937001 4911 generic.go:334] "Generic (PLEG): container finished" podID="5d18437e-1f1c-413d-8308-5ffe566350ee" containerID="64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f" exitCode=143
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.937902 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.938406 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5d18437e-1f1c-413d-8308-5ffe566350ee","Type":"ContainerDied","Data":"ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710"}
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.938431 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5d18437e-1f1c-413d-8308-5ffe566350ee","Type":"ContainerDied","Data":"64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f"}
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.938451 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5d18437e-1f1c-413d-8308-5ffe566350ee","Type":"ContainerDied","Data":"bc2d5188e4f0922f8b9fa3b8f8e6e218c03aa47b210943cf230711a110df43e8"}
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.938464 4911 scope.go:117] "RemoveContainer" containerID="ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710"
Sep 29 21:43:44 crc kubenswrapper[4911]: I0929 21:43:44.987864 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.007473 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.014435 4911 scope.go:117] "RemoveContainer" containerID="64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.039532 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 21:43:45 crc kubenswrapper[4911]: E0929 21:43:45.040472 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d18437e-1f1c-413d-8308-5ffe566350ee" containerName="nova-metadata-metadata"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.040497 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d18437e-1f1c-413d-8308-5ffe566350ee" containerName="nova-metadata-metadata"
Sep 29 21:43:45 crc kubenswrapper[4911]: E0929 21:43:45.040523 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d18437e-1f1c-413d-8308-5ffe566350ee" containerName="nova-metadata-log"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.040531 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d18437e-1f1c-413d-8308-5ffe566350ee" containerName="nova-metadata-log"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.040885 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d18437e-1f1c-413d-8308-5ffe566350ee" containerName="nova-metadata-log"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.040915 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d18437e-1f1c-413d-8308-5ffe566350ee" containerName="nova-metadata-metadata"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.042957 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.047762 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.047953 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.056273 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.056923 4911 scope.go:117] "RemoveContainer" containerID="ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710"
Sep 29 21:43:45 crc kubenswrapper[4911]: E0929 21:43:45.061913 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710\": container with ID starting with ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710 not found: ID does not exist" containerID="ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.062089 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710"} err="failed to get container status \"ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710\": rpc error: code = NotFound desc = could not find container \"ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710\": container with ID starting with ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710 not found: ID does not exist"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.062202 4911 scope.go:117] "RemoveContainer" containerID="64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f"
Sep 29 21:43:45 crc kubenswrapper[4911]: E0929 21:43:45.062645 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f\": container with ID starting with 64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f not found: ID does not exist" containerID="64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.062690 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f"} err="failed to get container status \"64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f\": rpc error: code = NotFound desc = could not find container \"64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f\": container with ID starting with 64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f not found: ID does not exist"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.062728 4911 scope.go:117] "RemoveContainer" containerID="ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.066208 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710"} err="failed to get container status \"ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710\": rpc error: code = NotFound desc = could not find container \"ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710\": container with ID starting with ba18b86cf3718c4e297f32d551819dec82abd8adca127ecb6b3794673abb4710 not found: ID does not exist"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.066234 4911 scope.go:117] "RemoveContainer" containerID="64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.066672 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f"} err="failed to get container status \"64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f\": rpc error: code = NotFound desc = could not find container \"64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f\": container with ID starting with 64d9831950763ad0b0279eaa3e77089c1b55fddd97a6caea699f63848ddf203f not found: ID does not exist"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.164607 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-config-data\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.164722 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.164779 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73cd0cda-606b-41b1-b7b6-92c6650d4dea-logs\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.164842 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrtwk\" (UniqueName: \"kubernetes.io/projected/73cd0cda-606b-41b1-b7b6-92c6650d4dea-kube-api-access-nrtwk\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.164869 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.266842 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-config-data\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.266972 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.267027 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73cd0cda-606b-41b1-b7b6-92c6650d4dea-logs\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.267066 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrtwk\" (UniqueName: \"kubernetes.io/projected/73cd0cda-606b-41b1-b7b6-92c6650d4dea-kube-api-access-nrtwk\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.267089 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.269004 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73cd0cda-606b-41b1-b7b6-92c6650d4dea-logs\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.271247 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.272158 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.275962 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-config-data\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.303177 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrtwk\" (UniqueName: \"kubernetes.io/projected/73cd0cda-606b-41b1-b7b6-92c6650d4dea-kube-api-access-nrtwk\") pod \"nova-metadata-0\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.363763 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.883691 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Sep 29 21:43:45 crc kubenswrapper[4911]: I0929 21:43:45.972294 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 21:43:45 crc kubenswrapper[4911]: W0929 21:43:45.976521 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73cd0cda_606b_41b1_b7b6_92c6650d4dea.slice/crio-690002f5b264032c3d843d5a0117b6a3735b2350be8d19b05316b162774879e7 WatchSource:0}: Error finding container 690002f5b264032c3d843d5a0117b6a3735b2350be8d19b05316b162774879e7: Status 404 returned error can't find the container with id 690002f5b264032c3d843d5a0117b6a3735b2350be8d19b05316b162774879e7
Sep 29 21:43:46 crc kubenswrapper[4911]: I0929 21:43:46.725402 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d18437e-1f1c-413d-8308-5ffe566350ee" path="/var/lib/kubelet/pods/5d18437e-1f1c-413d-8308-5ffe566350ee/volumes"
Sep 29 21:43:46 crc kubenswrapper[4911]: I0929 21:43:46.962577 4911 generic.go:334] "Generic (PLEG): container finished" podID="7c8f9423-562b-49fa-946d-10d52101e44c" containerID="8f60832b0ac2ad7edac27180158f9865b7ee0aa997d04e16793bce56d5f08973" exitCode=0
Sep 29 21:43:46 crc kubenswrapper[4911]: I0929 21:43:46.962623 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-49f9z" event={"ID":"7c8f9423-562b-49fa-946d-10d52101e44c","Type":"ContainerDied","Data":"8f60832b0ac2ad7edac27180158f9865b7ee0aa997d04e16793bce56d5f08973"}
Sep 29 21:43:46 crc kubenswrapper[4911]: I0929 21:43:46.968363 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"73cd0cda-606b-41b1-b7b6-92c6650d4dea","Type":"ContainerStarted","Data":"9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f"}
Sep 29 21:43:46 crc kubenswrapper[4911]: I0929 21:43:46.968423 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"73cd0cda-606b-41b1-b7b6-92c6650d4dea","Type":"ContainerStarted","Data":"196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a"}
Sep 29 21:43:46 crc kubenswrapper[4911]: I0929 21:43:46.968439 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"73cd0cda-606b-41b1-b7b6-92c6650d4dea","Type":"ContainerStarted","Data":"690002f5b264032c3d843d5a0117b6a3735b2350be8d19b05316b162774879e7"}
Sep 29 21:43:47 crc kubenswrapper[4911]: I0929 21:43:47.010768 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.010749506 podStartE2EDuration="3.010749506s" podCreationTimestamp="2025-09-29 21:43:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:43:47.002434318 +0000 UTC m=+1104.979546999" watchObservedRunningTime="2025-09-29 21:43:47.010749506 +0000 UTC m=+1104.987862187"
Sep 29 21:43:47 crc kubenswrapper[4911]: I0929 21:43:47.976870 4911 generic.go:334] "Generic (PLEG): container finished" podID="823859df-f000-4749-acfe-eb9168574272" containerID="755ed86710fd07765b52da1f63606bf3a3b4b7887b5407d7caa4a7a92f3fe11f" exitCode=0
Sep 29 21:43:47 crc kubenswrapper[4911]: I0929 21:43:47.977085 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xpzwc" event={"ID":"823859df-f000-4749-acfe-eb9168574272","Type":"ContainerDied","Data":"755ed86710fd07765b52da1f63606bf3a3b4b7887b5407d7caa4a7a92f3fe11f"}
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.346986 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.347259 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.371867 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.382507 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-49f9z"
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.525925 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-combined-ca-bundle\") pod \"7c8f9423-562b-49fa-946d-10d52101e44c\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") "
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.526036 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-scripts\") pod \"7c8f9423-562b-49fa-946d-10d52101e44c\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") "
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.526099 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrdn2\" (UniqueName: \"kubernetes.io/projected/7c8f9423-562b-49fa-946d-10d52101e44c-kube-api-access-vrdn2\") pod \"7c8f9423-562b-49fa-946d-10d52101e44c\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") "
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.526182 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-config-data\") pod \"7c8f9423-562b-49fa-946d-10d52101e44c\" (UID: \"7c8f9423-562b-49fa-946d-10d52101e44c\") "
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.536357 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-scripts" (OuterVolumeSpecName: "scripts") pod "7c8f9423-562b-49fa-946d-10d52101e44c" (UID: "7c8f9423-562b-49fa-946d-10d52101e44c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.537974 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c8f9423-562b-49fa-946d-10d52101e44c-kube-api-access-vrdn2" (OuterVolumeSpecName: "kube-api-access-vrdn2") pod "7c8f9423-562b-49fa-946d-10d52101e44c" (UID: "7c8f9423-562b-49fa-946d-10d52101e44c"). InnerVolumeSpecName "kube-api-access-vrdn2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.564300 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c8f9423-562b-49fa-946d-10d52101e44c" (UID: "7c8f9423-562b-49fa-946d-10d52101e44c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.567873 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-config-data" (OuterVolumeSpecName: "config-data") pod "7c8f9423-562b-49fa-946d-10d52101e44c" (UID: "7c8f9423-562b-49fa-946d-10d52101e44c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.568549 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.568597 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.628798 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.628825 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.628835 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrdn2\" (UniqueName: \"kubernetes.io/projected/7c8f9423-562b-49fa-946d-10d52101e44c-kube-api-access-vrdn2\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.628848 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c8f9423-562b-49fa-946d-10d52101e44c-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.654009 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-2srf4"
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.779413 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.839486 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-q9jx9"]
Sep 29 21:43:48 crc kubenswrapper[4911]: I0929 21:43:48.841591 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" podUID="fcbf9287-d564-45b2-b6be-4cbed087355f" containerName="dnsmasq-dns" containerID="cri-o://0f60db69caaf7760661f4272d40eedbed2ff2060dbe12302fec34a8b09c26d13" gracePeriod=10
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.004590 4911 generic.go:334] "Generic (PLEG): container finished" podID="fcbf9287-d564-45b2-b6be-4cbed087355f" containerID="0f60db69caaf7760661f4272d40eedbed2ff2060dbe12302fec34a8b09c26d13" exitCode=0
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.004676 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" event={"ID":"fcbf9287-d564-45b2-b6be-4cbed087355f","Type":"ContainerDied","Data":"0f60db69caaf7760661f4272d40eedbed2ff2060dbe12302fec34a8b09c26d13"}
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.013102 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-49f9z"
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.013297 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-49f9z" event={"ID":"7c8f9423-562b-49fa-946d-10d52101e44c","Type":"ContainerDied","Data":"091a3d21c3df913fccbc4a36cb3ce428ff231a4e395dcad58fbfe273ab2c062d"}
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.013324 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="091a3d21c3df913fccbc4a36cb3ce428ff231a4e395dcad58fbfe273ab2c062d"
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.056130 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.255187 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.255663 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" containerName="nova-api-log" containerID="cri-o://c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630" gracePeriod=30
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.256050 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" containerName="nova-api-api" containerID="cri-o://090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072" gracePeriod=30
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.275947 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": EOF"
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.275968 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": EOF"
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.278154 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.278347 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="73cd0cda-606b-41b1-b7b6-92c6650d4dea" containerName="nova-metadata-log" containerID="cri-o://196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a" gracePeriod=30
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.278926 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="73cd0cda-606b-41b1-b7b6-92c6650d4dea" containerName="nova-metadata-metadata" containerID="cri-o://9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f" gracePeriod=30
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.502331 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9"
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.581455 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xpzwc"
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.627290 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.699140 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-combined-ca-bundle\") pod \"823859df-f000-4749-acfe-eb9168574272\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.700686 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-config\") pod \"fcbf9287-d564-45b2-b6be-4cbed087355f\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.700752 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-config-data\") pod \"823859df-f000-4749-acfe-eb9168574272\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.700772 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff4vd\" (UniqueName: \"kubernetes.io/projected/fcbf9287-d564-45b2-b6be-4cbed087355f-kube-api-access-ff4vd\") pod \"fcbf9287-d564-45b2-b6be-4cbed087355f\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.701065 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-scripts\") pod \"823859df-f000-4749-acfe-eb9168574272\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.701106 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-dns-svc\") pod \"fcbf9287-d564-45b2-b6be-4cbed087355f\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.701164 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-dns-swift-storage-0\") pod \"fcbf9287-d564-45b2-b6be-4cbed087355f\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.701225 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-ovsdbserver-nb\") pod \"fcbf9287-d564-45b2-b6be-4cbed087355f\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.701304 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-ovsdbserver-sb\") pod \"fcbf9287-d564-45b2-b6be-4cbed087355f\" (UID: \"fcbf9287-d564-45b2-b6be-4cbed087355f\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.701360 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fclhz\" (UniqueName: \"kubernetes.io/projected/823859df-f000-4749-acfe-eb9168574272-kube-api-access-fclhz\") pod \"823859df-f000-4749-acfe-eb9168574272\" (UID: \"823859df-f000-4749-acfe-eb9168574272\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.707941 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-scripts" (OuterVolumeSpecName: "scripts") pod "823859df-f000-4749-acfe-eb9168574272" (UID: "823859df-f000-4749-acfe-eb9168574272"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.715157 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcbf9287-d564-45b2-b6be-4cbed087355f-kube-api-access-ff4vd" (OuterVolumeSpecName: "kube-api-access-ff4vd") pod "fcbf9287-d564-45b2-b6be-4cbed087355f" (UID: "fcbf9287-d564-45b2-b6be-4cbed087355f"). InnerVolumeSpecName "kube-api-access-ff4vd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.732238 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/823859df-f000-4749-acfe-eb9168574272-kube-api-access-fclhz" (OuterVolumeSpecName: "kube-api-access-fclhz") pod "823859df-f000-4749-acfe-eb9168574272" (UID: "823859df-f000-4749-acfe-eb9168574272"). InnerVolumeSpecName "kube-api-access-fclhz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.734178 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-config-data" (OuterVolumeSpecName: "config-data") pod "823859df-f000-4749-acfe-eb9168574272" (UID: "823859df-f000-4749-acfe-eb9168574272"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.772807 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-config" (OuterVolumeSpecName: "config") pod "fcbf9287-d564-45b2-b6be-4cbed087355f" (UID: "fcbf9287-d564-45b2-b6be-4cbed087355f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.776260 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fcbf9287-d564-45b2-b6be-4cbed087355f" (UID: "fcbf9287-d564-45b2-b6be-4cbed087355f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.778626 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "823859df-f000-4749-acfe-eb9168574272" (UID: "823859df-f000-4749-acfe-eb9168574272"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.785715 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fcbf9287-d564-45b2-b6be-4cbed087355f" (UID: "fcbf9287-d564-45b2-b6be-4cbed087355f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.800321 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fcbf9287-d564-45b2-b6be-4cbed087355f" (UID: "fcbf9287-d564-45b2-b6be-4cbed087355f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.801548 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fcbf9287-d564-45b2-b6be-4cbed087355f" (UID: "fcbf9287-d564-45b2-b6be-4cbed087355f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.802430 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.805373 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff4vd\" (UniqueName: \"kubernetes.io/projected/fcbf9287-d564-45b2-b6be-4cbed087355f-kube-api-access-ff4vd\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.805450 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.805517 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.805684 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.805755 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.805845 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fclhz\" (UniqueName: \"kubernetes.io/projected/823859df-f000-4749-acfe-eb9168574272-kube-api-access-fclhz\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.805944 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/823859df-f000-4749-acfe-eb9168574272-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.806017 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-config\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.896103 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.907236 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-combined-ca-bundle\") pod \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.907552 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrtwk\" (UniqueName: \"kubernetes.io/projected/73cd0cda-606b-41b1-b7b6-92c6650d4dea-kube-api-access-nrtwk\") pod \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.907667 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-config-data\") pod \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.907834 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73cd0cda-606b-41b1-b7b6-92c6650d4dea-logs\") pod \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.907949 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-nova-metadata-tls-certs\") pod \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\" (UID: \"73cd0cda-606b-41b1-b7b6-92c6650d4dea\") "
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.908388 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fcbf9287-d564-45b2-b6be-4cbed087355f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.909268 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73cd0cda-606b-41b1-b7b6-92c6650d4dea-logs" (OuterVolumeSpecName: "logs") pod "73cd0cda-606b-41b1-b7b6-92c6650d4dea" (UID: "73cd0cda-606b-41b1-b7b6-92c6650d4dea"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.918031 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73cd0cda-606b-41b1-b7b6-92c6650d4dea-kube-api-access-nrtwk" (OuterVolumeSpecName: "kube-api-access-nrtwk") pod "73cd0cda-606b-41b1-b7b6-92c6650d4dea" (UID: "73cd0cda-606b-41b1-b7b6-92c6650d4dea"). InnerVolumeSpecName "kube-api-access-nrtwk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.962625 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73cd0cda-606b-41b1-b7b6-92c6650d4dea" (UID: "73cd0cda-606b-41b1-b7b6-92c6650d4dea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.982855 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-config-data" (OuterVolumeSpecName: "config-data") pod "73cd0cda-606b-41b1-b7b6-92c6650d4dea" (UID: "73cd0cda-606b-41b1-b7b6-92c6650d4dea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:49 crc kubenswrapper[4911]: I0929 21:43:49.986188 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "73cd0cda-606b-41b1-b7b6-92c6650d4dea" (UID: "73cd0cda-606b-41b1-b7b6-92c6650d4dea"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.012763 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.012811 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrtwk\" (UniqueName: \"kubernetes.io/projected/73cd0cda-606b-41b1-b7b6-92c6650d4dea-kube-api-access-nrtwk\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.012823 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.012831 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73cd0cda-606b-41b1-b7b6-92c6650d4dea-logs\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.012840 4911 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/73cd0cda-606b-41b1-b7b6-92c6650d4dea-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.027209 4911 generic.go:334] "Generic (PLEG): container finished" podID="73cd0cda-606b-41b1-b7b6-92c6650d4dea" containerID="9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f" exitCode=0
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.027492 4911 generic.go:334] "Generic (PLEG): container finished" podID="73cd0cda-606b-41b1-b7b6-92c6650d4dea" containerID="196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a" exitCode=143
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.027296 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"73cd0cda-606b-41b1-b7b6-92c6650d4dea","Type":"ContainerDied","Data":"9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f"}
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.027764 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"73cd0cda-606b-41b1-b7b6-92c6650d4dea","Type":"ContainerDied","Data":"196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a"}
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.027980 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"73cd0cda-606b-41b1-b7b6-92c6650d4dea","Type":"ContainerDied","Data":"690002f5b264032c3d843d5a0117b6a3735b2350be8d19b05316b162774879e7"}
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.028255 4911 scope.go:117] "RemoveContainer" containerID="9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.027276 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.033964 4911 generic.go:334] "Generic (PLEG): container finished" podID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" containerID="c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630" exitCode=143
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.034017 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb","Type":"ContainerDied","Data":"c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630"}
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.055851 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9" event={"ID":"fcbf9287-d564-45b2-b6be-4cbed087355f","Type":"ContainerDied","Data":"e05ebfc79a3be6f3a75cc3dd135da6869396f28bf41b18e282c51368c99e0f48"}
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.055935 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-q9jx9"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.061535 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xpzwc"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.068404 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xpzwc" event={"ID":"823859df-f000-4749-acfe-eb9168574272","Type":"ContainerDied","Data":"13bae527916c96b772841a042be3a5ed3ee1c03025f3113f0c1761717e0fe4a2"}
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.068464 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13bae527916c96b772841a042be3a5ed3ee1c03025f3113f0c1761717e0fe4a2"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.069635 4911 scope.go:117] "RemoveContainer" containerID="196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.077635 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.087253 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Sep 29 21:43:50 crc kubenswrapper[4911]: E0929 21:43:50.087756 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73cd0cda-606b-41b1-b7b6-92c6650d4dea" containerName="nova-metadata-log"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.087782 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="73cd0cda-606b-41b1-b7b6-92c6650d4dea" containerName="nova-metadata-log"
Sep 29 21:43:50 crc kubenswrapper[4911]: E0929 21:43:50.088167 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcbf9287-d564-45b2-b6be-4cbed087355f" containerName="dnsmasq-dns"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.088181 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcbf9287-d564-45b2-b6be-4cbed087355f" containerName="dnsmasq-dns"
Sep 29 21:43:50 crc kubenswrapper[4911]: E0929 21:43:50.088193 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcbf9287-d564-45b2-b6be-4cbed087355f" containerName="init"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.088512 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcbf9287-d564-45b2-b6be-4cbed087355f" containerName="init"
Sep 29 21:43:50 crc kubenswrapper[4911]: E0929 21:43:50.088539 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823859df-f000-4749-acfe-eb9168574272" containerName="nova-cell1-conductor-db-sync"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.088547 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="823859df-f000-4749-acfe-eb9168574272" containerName="nova-cell1-conductor-db-sync"
Sep 29 21:43:50 crc kubenswrapper[4911]: E0929 21:43:50.088564 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c8f9423-562b-49fa-946d-10d52101e44c" containerName="nova-manage"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.088571 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c8f9423-562b-49fa-946d-10d52101e44c" containerName="nova-manage"
Sep 29 21:43:50 crc kubenswrapper[4911]: E0929 21:43:50.088601 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73cd0cda-606b-41b1-b7b6-92c6650d4dea" containerName="nova-metadata-metadata"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.088608 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="73cd0cda-606b-41b1-b7b6-92c6650d4dea" containerName="nova-metadata-metadata"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.089022 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c8f9423-562b-49fa-946d-10d52101e44c" containerName="nova-manage"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.089046 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="73cd0cda-606b-41b1-b7b6-92c6650d4dea" containerName="nova-metadata-log"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.089055 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="823859df-f000-4749-acfe-eb9168574272" containerName="nova-cell1-conductor-db-sync"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.089068 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="73cd0cda-606b-41b1-b7b6-92c6650d4dea" containerName="nova-metadata-metadata"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.089084 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcbf9287-d564-45b2-b6be-4cbed087355f" containerName="dnsmasq-dns"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.091654 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.096903 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.097198 4911 scope.go:117] "RemoveContainer" containerID="9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f"
Sep 29 21:43:50 crc kubenswrapper[4911]: E0929 21:43:50.097637 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f\": container with ID starting with 9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f not found: ID does not exist" containerID="9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.097668 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f"} err="failed to get container status \"9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f\": rpc error: code = NotFound desc = could not find container \"9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f\": container with ID starting with 9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f not found: ID does not exist"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.097694 4911 scope.go:117] "RemoveContainer" containerID="196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a"
Sep 29 21:43:50 crc kubenswrapper[4911]: E0929 21:43:50.102019 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a\": container with ID starting with 196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a not found: ID does not exist" containerID="196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.102069 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a"} err="failed to get container status \"196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a\": rpc error: code = NotFound desc = could not find container \"196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a\": container with ID starting with 196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a not found: ID does not exist"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.102101 4911 scope.go:117] "RemoveContainer" containerID="9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.107523 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f"} err="failed to get container status \"9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f\": rpc error: code = NotFound desc = could not find container \"9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f\": container with ID starting with 9007993a80b139dedc9a60ad4a9aeed265a16c608ad3794a9ac114796741c51f not found: ID does not exist"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.107563 4911 scope.go:117] "RemoveContainer" containerID="196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.111302 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.114298 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb394e72-1a5f-4815-a6d2-e2636239108f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"fb394e72-1a5f-4815-a6d2-e2636239108f\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.114457 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb394e72-1a5f-4815-a6d2-e2636239108f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"fb394e72-1a5f-4815-a6d2-e2636239108f\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.114655 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szt7x\" (UniqueName: \"kubernetes.io/projected/fb394e72-1a5f-4815-a6d2-e2636239108f-kube-api-access-szt7x\") pod \"nova-cell1-conductor-0\" (UID: \"fb394e72-1a5f-4815-a6d2-e2636239108f\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.115064 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a"} err="failed to get container status \"196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a\": rpc error: code = NotFound desc = could not find container \"196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a\": container with ID starting with 196b7baa21c7d82333181421c08280dfcb503534339974acd2db7013924a8e0a not found: ID does not exist"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.115105 4911 scope.go:117] "RemoveContainer" containerID="0f60db69caaf7760661f4272d40eedbed2ff2060dbe12302fec34a8b09c26d13"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.137308 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.156757 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-q9jx9"]
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.167863 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-q9jx9"]
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.179219 4911 scope.go:117] "RemoveContainer" containerID="7e1c7dd51da3b0b8db72b0f1271c24aebd4d6fcc8294ab0005c70f59c750e0b5"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.190321 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.192299 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.194728 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.196333 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.215917 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb394e72-1a5f-4815-a6d2-e2636239108f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"fb394e72-1a5f-4815-a6d2-e2636239108f\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.215955 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb394e72-1a5f-4815-a6d2-e2636239108f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"fb394e72-1a5f-4815-a6d2-e2636239108f\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.215992 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.216037 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr8xm\" (UniqueName: \"kubernetes.io/projected/d5e82b41-14f5-4503-a919-e9ea37d98ead-kube-api-access-rr8xm\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.216060 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szt7x\" (UniqueName: \"kubernetes.io/projected/fb394e72-1a5f-4815-a6d2-e2636239108f-kube-api-access-szt7x\") pod \"nova-cell1-conductor-0\" (UID: \"fb394e72-1a5f-4815-a6d2-e2636239108f\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.216083 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-config-data\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.216107 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.216123 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5e82b41-14f5-4503-a919-e9ea37d98ead-logs\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.223383 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.228250 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb394e72-1a5f-4815-a6d2-e2636239108f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"fb394e72-1a5f-4815-a6d2-e2636239108f\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.230666 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb394e72-1a5f-4815-a6d2-e2636239108f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"fb394e72-1a5f-4815-a6d2-e2636239108f\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.234265 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szt7x\" (UniqueName: \"kubernetes.io/projected/fb394e72-1a5f-4815-a6d2-e2636239108f-kube-api-access-szt7x\") pod \"nova-cell1-conductor-0\" (UID: \"fb394e72-1a5f-4815-a6d2-e2636239108f\") " pod="openstack/nova-cell1-conductor-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.270767 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.270997 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="3a5c8719-7840-4411-a449-81012851c24d" containerName="kube-state-metrics" containerID="cri-o://dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61" gracePeriod=30
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.318164 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-config-data\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.318226 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.318249 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5e82b41-14f5-4503-a919-e9ea37d98ead-logs\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.318360 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.318405 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr8xm\" (UniqueName: \"kubernetes.io/projected/d5e82b41-14f5-4503-a919-e9ea37d98ead-kube-api-access-rr8xm\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.318765 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5e82b41-14f5-4503-a919-e9ea37d98ead-logs\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.321377 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-config-data\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.321553 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.322769 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.332125 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr8xm\" (UniqueName: \"kubernetes.io/projected/d5e82b41-14f5-4503-a919-e9ea37d98ead-kube-api-access-rr8xm\") pod \"nova-metadata-0\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.421913 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.512682 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.682321 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.736869 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73cd0cda-606b-41b1-b7b6-92c6650d4dea" path="/var/lib/kubelet/pods/73cd0cda-606b-41b1-b7b6-92c6650d4dea/volumes"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.737586 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcbf9287-d564-45b2-b6be-4cbed087355f" path="/var/lib/kubelet/pods/fcbf9287-d564-45b2-b6be-4cbed087355f/volumes"
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.831572 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gw745\" (UniqueName: \"kubernetes.io/projected/3a5c8719-7840-4411-a449-81012851c24d-kube-api-access-gw745\") pod \"3a5c8719-7840-4411-a449-81012851c24d\" (UID: \"3a5c8719-7840-4411-a449-81012851c24d\") "
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.840804 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a5c8719-7840-4411-a449-81012851c24d-kube-api-access-gw745" (OuterVolumeSpecName: "kube-api-access-gw745") pod "3a5c8719-7840-4411-a449-81012851c24d" (UID: "3a5c8719-7840-4411-a449-81012851c24d"). InnerVolumeSpecName "kube-api-access-gw745". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:43:50 crc kubenswrapper[4911]: I0929 21:43:50.933910 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gw745\" (UniqueName: \"kubernetes.io/projected/3a5c8719-7840-4411-a449-81012851c24d-kube-api-access-gw745\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.061356 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Sep 29 21:43:51 crc kubenswrapper[4911]: W0929 21:43:51.069896 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb394e72_1a5f_4815_a6d2_e2636239108f.slice/crio-99f2aafac540ce458ce599480565438311651c05a62b49f5e4db0da6c2d0e143 WatchSource:0}: Error finding container 99f2aafac540ce458ce599480565438311651c05a62b49f5e4db0da6c2d0e143: Status 404 returned error can't find the container with id 99f2aafac540ce458ce599480565438311651c05a62b49f5e4db0da6c2d0e143
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.070993 4911 generic.go:334] "Generic (PLEG): container finished" podID="3a5c8719-7840-4411-a449-81012851c24d" containerID="dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61" exitCode=2
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.071081 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.071069 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3a5c8719-7840-4411-a449-81012851c24d","Type":"ContainerDied","Data":"dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61"}
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.071250 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3a5c8719-7840-4411-a449-81012851c24d","Type":"ContainerDied","Data":"bfbbd588dfd0dd90a372f2d6f74501e4d6b9cae5079a2d8f83d90d044a04ead9"}
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.071274 4911 scope.go:117] "RemoveContainer" containerID="dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.081426 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="72f12417-f4fb-494a-abca-37933bdb860d" containerName="nova-scheduler-scheduler" containerID="cri-o://abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17" gracePeriod=30
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.094941 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.107969 4911 scope.go:117] "RemoveContainer" containerID="dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61"
Sep 29 21:43:51 crc kubenswrapper[4911]: E0929 21:43:51.109003 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61\": container with ID starting with dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61 not found: ID does not exist" containerID="dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.109051 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61"} err="failed to get container status \"dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61\": rpc error: code = NotFound desc = could not find container \"dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61\": container with ID starting with dc6390a245b35d023d874599225b04a340d604ecdeb4228cf9a38c2c56ec8d61 not found: ID does not exist"
Sep 29 21:43:51 crc kubenswrapper[4911]: W0929 21:43:51.115976 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5e82b41_14f5_4503_a919_e9ea37d98ead.slice/crio-f1d481dbac077dfc083286d7728407bb63939167d582dc6f4ec1bccb7c599774 WatchSource:0}: Error finding container f1d481dbac077dfc083286d7728407bb63939167d582dc6f4ec1bccb7c599774: Status 404 returned error can't find the container with id f1d481dbac077dfc083286d7728407bb63939167d582dc6f4ec1bccb7c599774
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.119728 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.134424 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.146556 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 21:43:51 crc kubenswrapper[4911]: E0929 21:43:51.146962 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a5c8719-7840-4411-a449-81012851c24d" containerName="kube-state-metrics"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.146983 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a5c8719-7840-4411-a449-81012851c24d" containerName="kube-state-metrics"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.147305 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a5c8719-7840-4411-a449-81012851c24d" containerName="kube-state-metrics"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.148075 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.151109 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.151252 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.171737 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.238916 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") " pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.239232 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgt2h\" (UniqueName: \"kubernetes.io/projected/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-api-access-wgt2h\") pod \"kube-state-metrics-0\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") " pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.239295 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") " pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.239330 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") " pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.340866 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") " pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.340970 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") " pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.341040 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgt2h\" (UniqueName: \"kubernetes.io/projected/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-api-access-wgt2h\") pod \"kube-state-metrics-0\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") " pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.341090 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") " pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.351528 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") " pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.352390 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") " pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.361469 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgt2h\" (UniqueName: \"kubernetes.io/projected/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-api-access-wgt2h\") pod \"kube-state-metrics-0\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") " pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.365413 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") " pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.468241 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 21:43:51 crc kubenswrapper[4911]: I0929 21:43:51.930563 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 21:43:51 crc kubenswrapper[4911]: W0929 21:43:51.935928 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9983d5ff_ae31_4562_a659_9acb5742e5e7.slice/crio-4e2d17c5d1b3d6b7c6bc699a53a4bae88a817595e42df75007da0e1fafa2eb59 WatchSource:0}: Error finding container 4e2d17c5d1b3d6b7c6bc699a53a4bae88a817595e42df75007da0e1fafa2eb59: Status 404 returned error can't find the container with id 4e2d17c5d1b3d6b7c6bc699a53a4bae88a817595e42df75007da0e1fafa2eb59
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.092281 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5e82b41-14f5-4503-a919-e9ea37d98ead","Type":"ContainerStarted","Data":"e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78"}
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.092326 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5e82b41-14f5-4503-a919-e9ea37d98ead","Type":"ContainerStarted","Data":"9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329"}
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.092336 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5e82b41-14f5-4503-a919-e9ea37d98ead","Type":"ContainerStarted","Data":"f1d481dbac077dfc083286d7728407bb63939167d582dc6f4ec1bccb7c599774"}
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.094679 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"fb394e72-1a5f-4815-a6d2-e2636239108f","Type":"ContainerStarted","Data":"56a9d27f535d2b530904bfdbd578a1cd55caa41c3a5e9efae0baacaa1ce527b9"}
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.094705 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"fb394e72-1a5f-4815-a6d2-e2636239108f","Type":"ContainerStarted","Data":"99f2aafac540ce458ce599480565438311651c05a62b49f5e4db0da6c2d0e143"}
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.095087 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.096268 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9983d5ff-ae31-4562-a659-9acb5742e5e7","Type":"ContainerStarted","Data":"4e2d17c5d1b3d6b7c6bc699a53a4bae88a817595e42df75007da0e1fafa2eb59"}
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.110493 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.110480007 podStartE2EDuration="2.110480007s" podCreationTimestamp="2025-09-29 21:43:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:43:52.107474444 +0000 UTC m=+1110.084587115" watchObservedRunningTime="2025-09-29 21:43:52.110480007 +0000 UTC m=+1110.087592688"
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.130636 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.130617713 podStartE2EDuration="2.130617713s" podCreationTimestamp="2025-09-29 21:43:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:43:52.124219765 +0000 UTC m=+1110.101332436" watchObservedRunningTime="2025-09-29 21:43:52.130617713 +0000 UTC m=+1110.107730384"
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.733157 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a5c8719-7840-4411-a449-81012851c24d" path="/var/lib/kubelet/pods/3a5c8719-7840-4411-a449-81012851c24d/volumes"
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.734136 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.734370 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="ceilometer-central-agent" containerID="cri-o://94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5" gracePeriod=30
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.734507 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="proxy-httpd" containerID="cri-o://dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348" gracePeriod=30
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.734571 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="sg-core" containerID="cri-o://e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d" gracePeriod=30
Sep 29 21:43:52 crc kubenswrapper[4911]: I0929 21:43:52.734604 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="ceilometer-notification-agent" containerID="cri-o://2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a" gracePeriod=30
Sep 29 21:43:53 crc kubenswrapper[4911]: I0929 21:43:53.105404 4911 generic.go:334] "Generic (PLEG): container finished" podID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerID="dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348" exitCode=0
Sep 29 21:43:53 crc kubenswrapper[4911]: I0929 21:43:53.105676 4911 generic.go:334] "Generic (PLEG): container finished" podID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerID="e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d" exitCode=2
Sep 29 21:43:53 crc kubenswrapper[4911]: I0929 21:43:53.105689 4911 generic.go:334] "Generic (PLEG): container finished" podID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerID="94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5" exitCode=0
Sep 29 21:43:53 crc kubenswrapper[4911]: I0929 21:43:53.107114 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8460aed-5642-4bd8-8719-2d8a18055a5f","Type":"ContainerDied","Data":"dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348"}
Sep 29 21:43:53 crc kubenswrapper[4911]: I0929 21:43:53.107325 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8460aed-5642-4bd8-8719-2d8a18055a5f","Type":"ContainerDied","Data":"e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d"}
Sep 29 21:43:53 crc kubenswrapper[4911]: I0929 21:43:53.107471 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8460aed-5642-4bd8-8719-2d8a18055a5f","Type":"ContainerDied","Data":"94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5"}
Sep 29 21:43:53 crc kubenswrapper[4911]: I0929 21:43:53.109617 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9983d5ff-ae31-4562-a659-9acb5742e5e7","Type":"ContainerStarted","Data":"5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3"}
Sep 29 21:43:53 crc kubenswrapper[4911]: I0929 21:43:53.110343 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Sep 29 21:43:53 crc kubenswrapper[4911]: I0929 21:43:53.175687 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.7646350960000001 podStartE2EDuration="2.175666081s" podCreationTimestamp="2025-09-29 21:43:51 +0000 UTC" firstStartedPulling="2025-09-29 21:43:51.93809531 +0000 UTC m=+1109.915207981" lastFinishedPulling="2025-09-29 21:43:52.349126295 +0000 UTC m=+1110.326238966" observedRunningTime="2025-09-29 21:43:53.1286325 +0000 UTC m=+1111.105745211" watchObservedRunningTime="2025-09-29 21:43:53.175666081 +0000 UTC m=+1111.152778752"
Sep 29 21:43:53 crc kubenswrapper[4911]: E0929 21:43:53.348803 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Sep 29 21:43:53 crc kubenswrapper[4911]: E0929 21:43:53.350153 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Sep 29 21:43:53 crc kubenswrapper[4911]: E0929 21:43:53.351718 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Sep 29 21:43:53 crc kubenswrapper[4911]: E0929 21:43:53.351745 4911 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="72f12417-f4fb-494a-abca-37933bdb860d" containerName="nova-scheduler-scheduler"
Sep 29 21:43:53 crc kubenswrapper[4911]: I0929 21:43:53.830485 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.000572 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8460aed-5642-4bd8-8719-2d8a18055a5f-log-httpd\") pod \"f8460aed-5642-4bd8-8719-2d8a18055a5f\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") "
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.001452 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8460aed-5642-4bd8-8719-2d8a18055a5f-run-httpd\") pod \"f8460aed-5642-4bd8-8719-2d8a18055a5f\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") "
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.001379 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8460aed-5642-4bd8-8719-2d8a18055a5f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f8460aed-5642-4bd8-8719-2d8a18055a5f" (UID: "f8460aed-5642-4bd8-8719-2d8a18055a5f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.001739 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7dqf\" (UniqueName: \"kubernetes.io/projected/f8460aed-5642-4bd8-8719-2d8a18055a5f-kube-api-access-f7dqf\") pod \"f8460aed-5642-4bd8-8719-2d8a18055a5f\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") "
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.001834 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8460aed-5642-4bd8-8719-2d8a18055a5f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f8460aed-5642-4bd8-8719-2d8a18055a5f" (UID: "f8460aed-5642-4bd8-8719-2d8a18055a5f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.001964 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-sg-core-conf-yaml\") pod \"f8460aed-5642-4bd8-8719-2d8a18055a5f\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") "
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.002849 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-config-data\") pod \"f8460aed-5642-4bd8-8719-2d8a18055a5f\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") "
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.002934 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-scripts\") pod \"f8460aed-5642-4bd8-8719-2d8a18055a5f\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") "
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.002994 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-combined-ca-bundle\") pod \"f8460aed-5642-4bd8-8719-2d8a18055a5f\" (UID: \"f8460aed-5642-4bd8-8719-2d8a18055a5f\") "
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.004218 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8460aed-5642-4bd8-8719-2d8a18055a5f-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.004252 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8460aed-5642-4bd8-8719-2d8a18055a5f-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.013743 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-scripts" (OuterVolumeSpecName: "scripts") pod "f8460aed-5642-4bd8-8719-2d8a18055a5f" (UID: "f8460aed-5642-4bd8-8719-2d8a18055a5f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.018965 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8460aed-5642-4bd8-8719-2d8a18055a5f-kube-api-access-f7dqf" (OuterVolumeSpecName: "kube-api-access-f7dqf") pod "f8460aed-5642-4bd8-8719-2d8a18055a5f" (UID: "f8460aed-5642-4bd8-8719-2d8a18055a5f"). InnerVolumeSpecName "kube-api-access-f7dqf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.044784 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f8460aed-5642-4bd8-8719-2d8a18055a5f" (UID: "f8460aed-5642-4bd8-8719-2d8a18055a5f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.100487 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-config-data" (OuterVolumeSpecName: "config-data") pod "f8460aed-5642-4bd8-8719-2d8a18055a5f" (UID: "f8460aed-5642-4bd8-8719-2d8a18055a5f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.100525 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f8460aed-5642-4bd8-8719-2d8a18055a5f" (UID: "f8460aed-5642-4bd8-8719-2d8a18055a5f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.106363 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7dqf\" (UniqueName: \"kubernetes.io/projected/f8460aed-5642-4bd8-8719-2d8a18055a5f-kube-api-access-f7dqf\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.106413 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.106434 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.106487 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.106504 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8460aed-5642-4bd8-8719-2d8a18055a5f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.122149 4911 generic.go:334] "Generic (PLEG): container finished" podID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerID="2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a" exitCode=0
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.122244 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.122232 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8460aed-5642-4bd8-8719-2d8a18055a5f","Type":"ContainerDied","Data":"2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a"}
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.122309 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8460aed-5642-4bd8-8719-2d8a18055a5f","Type":"ContainerDied","Data":"a5dec418f5073e6659490e11e531ca05d0f54fc02ffaa64f4b7ea9229c09a192"}
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.122341 4911 scope.go:117] "RemoveContainer" containerID="dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.143356 4911 scope.go:117] "RemoveContainer" containerID="e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.156085 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.174551 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.182872 4911 scope.go:117] "RemoveContainer" containerID="2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.193830 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:43:54 crc kubenswrapper[4911]: E0929 21:43:54.194283 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="ceilometer-notification-agent"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.194296 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="ceilometer-notification-agent"
Sep 29 21:43:54 crc kubenswrapper[4911]: E0929 21:43:54.194334 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="proxy-httpd"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.194341 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="proxy-httpd"
Sep 29 21:43:54 crc kubenswrapper[4911]: E0929 21:43:54.194352 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="ceilometer-central-agent"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.194357 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="ceilometer-central-agent"
Sep 29 21:43:54 crc kubenswrapper[4911]: E0929 21:43:54.194371 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="sg-core"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.194377 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="sg-core"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.194540 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="ceilometer-central-agent"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.194554 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="proxy-httpd"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.194570 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="ceilometer-notification-agent"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.194582 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" containerName="sg-core"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.196200 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.203381 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.203566 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.203958 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.211061 4911 scope.go:117] "RemoveContainer" containerID="94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.212017 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.241353 4911 scope.go:117] "RemoveContainer" containerID="dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348"
Sep 29 21:43:54 crc kubenswrapper[4911]: E0929 21:43:54.241709 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348\": container with ID starting with dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348 not found: ID does not exist" containerID="dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.241751 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348"} err="failed to get container status \"dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348\": rpc error: code = NotFound desc = could not find container \"dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348\": container with ID starting with dc0dcbd39f0fcd2d68e33b76cdd79343bc90b98e25e0e9696695c14e3472f348 not found: ID does not exist"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.241779 4911 scope.go:117] "RemoveContainer" containerID="e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d"
Sep 29 21:43:54 crc kubenswrapper[4911]: E0929 21:43:54.242227 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d\": container with ID starting with e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d not found: ID does not exist" containerID="e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.242257 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d"} err="failed to get container status \"e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d\": rpc error: code = NotFound desc = could not find container \"e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d\": container with ID starting with e9f3a7664258d68ae4bcbd1036cb3d76dd9c980d657d019ea2ccb8c498e4074d not found: ID does not exist"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.242277 4911 scope.go:117] "RemoveContainer" containerID="2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a"
Sep 29 21:43:54 crc kubenswrapper[4911]: E0929 21:43:54.242496 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a\": container with ID starting with 2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a not found: ID does not exist" containerID="2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.242523 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a"} err="failed to get container status \"2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a\": rpc error: code = NotFound desc = could not find container \"2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a\": container with ID starting with 2dba839b28bf59c8578ed16cc4b3d68afdd0d3d90d01271f9d72ac6ac80e109a not found: ID does not exist"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.242537 4911 scope.go:117] "RemoveContainer" containerID="94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5"
Sep 29 21:43:54 crc kubenswrapper[4911]: E0929 21:43:54.242738 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5\": container with ID starting with 94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5 not found: ID does not exist" containerID="94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.242761 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5"} err="failed to get container status \"94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5\": rpc error: code = NotFound desc = could not find container \"94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5\": container with ID starting with 94996a606816f75959c32d23866bbbbeb6ddb02d2693308e4bdeeb4d6020e9f5 not found: ID does not exist"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.312078 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-run-httpd\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.312365 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-config-data\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.312418 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.312608 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phlgf\" (UniqueName: \"kubernetes.io/projected/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-kube-api-access-phlgf\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.313299 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-log-httpd\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.313326 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.313623 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.313692 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-scripts\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.414815 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-run-httpd\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.414880 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-config-data\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.414919 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0"
Sep 29 21:43:54 crc kubenswrapper[4911]: I0929
21:43:54.414957 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phlgf\" (UniqueName: \"kubernetes.io/projected/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-kube-api-access-phlgf\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.415020 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-log-httpd\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.415035 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.415052 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.415081 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-scripts\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.415969 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-run-httpd\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.416169 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-log-httpd\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.418964 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-scripts\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.419434 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.419994 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-config-data\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.420539 4911 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.422010 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.433310 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phlgf\" (UniqueName: \"kubernetes.io/projected/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-kube-api-access-phlgf\") pod \"ceilometer-0\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.516429 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.728112 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8460aed-5642-4bd8-8719-2d8a18055a5f" path="/var/lib/kubelet/pods/f8460aed-5642-4bd8-8719-2d8a18055a5f/volumes" Sep 29 21:43:54 crc kubenswrapper[4911]: I0929 21:43:54.973427 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.025731 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72f12417-f4fb-494a-abca-37933bdb860d-config-data\") pod \"72f12417-f4fb-494a-abca-37933bdb860d\" (UID: \"72f12417-f4fb-494a-abca-37933bdb860d\") " Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.025812 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72f12417-f4fb-494a-abca-37933bdb860d-combined-ca-bundle\") pod \"72f12417-f4fb-494a-abca-37933bdb860d\" (UID: \"72f12417-f4fb-494a-abca-37933bdb860d\") " Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.025845 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sz6xj\" (UniqueName: \"kubernetes.io/projected/72f12417-f4fb-494a-abca-37933bdb860d-kube-api-access-sz6xj\") pod \"72f12417-f4fb-494a-abca-37933bdb860d\" (UID: \"72f12417-f4fb-494a-abca-37933bdb860d\") " Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.031323 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72f12417-f4fb-494a-abca-37933bdb860d-kube-api-access-sz6xj" (OuterVolumeSpecName: "kube-api-access-sz6xj") pod "72f12417-f4fb-494a-abca-37933bdb860d" (UID: "72f12417-f4fb-494a-abca-37933bdb860d"). InnerVolumeSpecName "kube-api-access-sz6xj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.057328 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72f12417-f4fb-494a-abca-37933bdb860d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72f12417-f4fb-494a-abca-37933bdb860d" (UID: "72f12417-f4fb-494a-abca-37933bdb860d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.075948 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72f12417-f4fb-494a-abca-37933bdb860d-config-data" (OuterVolumeSpecName: "config-data") pod "72f12417-f4fb-494a-abca-37933bdb860d" (UID: "72f12417-f4fb-494a-abca-37933bdb860d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.130950 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72f12417-f4fb-494a-abca-37933bdb860d-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.132769 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72f12417-f4fb-494a-abca-37933bdb860d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.132941 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sz6xj\" (UniqueName: \"kubernetes.io/projected/72f12417-f4fb-494a-abca-37933bdb860d-kube-api-access-sz6xj\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.136834 4911 generic.go:334] "Generic (PLEG): container finished" podID="72f12417-f4fb-494a-abca-37933bdb860d" containerID="abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17" exitCode=0 Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.137707 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.143003 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"72f12417-f4fb-494a-abca-37933bdb860d","Type":"ContainerDied","Data":"abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17"} Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.143041 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"72f12417-f4fb-494a-abca-37933bdb860d","Type":"ContainerDied","Data":"a67bc82218d6c32ddb1488ebadaaceca10153facace219489971e5489c146489"} Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.143059 4911 scope.go:117] "RemoveContainer" containerID="abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.186582 4911 scope.go:117] "RemoveContainer" containerID="abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17" Sep 29 21:43:55 crc kubenswrapper[4911]: E0929 21:43:55.187623 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17\": container with ID starting with abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17 not found: ID does not exist" containerID="abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.187678 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17"} err="failed to get container status \"abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17\": rpc error: code = NotFound desc = could not find container 
\"abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17\": container with ID starting with abd68a86a34c886a7d1fa5c16c8acd2f6f5b2c06b6bc536d7c96c7a40ad58c17 not found: ID does not exist" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.194667 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.223610 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.223670 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.223724 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.224690 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4a9c99b6ceab26bcf54375dc4957b5762f55e899af1d807a48454b472085e569"} pod="openshift-machine-config-operator/machine-config-daemon-w647f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.224769 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" containerID="cri-o://4a9c99b6ceab26bcf54375dc4957b5762f55e899af1d807a48454b472085e569" gracePeriod=600 Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.229378 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.241844 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.251930 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:43:55 crc kubenswrapper[4911]: E0929 21:43:55.252394 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72f12417-f4fb-494a-abca-37933bdb860d" containerName="nova-scheduler-scheduler" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.252407 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="72f12417-f4fb-494a-abca-37933bdb860d" containerName="nova-scheduler-scheduler" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.252600 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="72f12417-f4fb-494a-abca-37933bdb860d" containerName="nova-scheduler-scheduler" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.253259 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.258152 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.266946 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.336316 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4625af6e-56d3-4c1e-bac1-1ba58400e121-config-data\") pod \"nova-scheduler-0\" (UID: \"4625af6e-56d3-4c1e-bac1-1ba58400e121\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.336453 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4625af6e-56d3-4c1e-bac1-1ba58400e121-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4625af6e-56d3-4c1e-bac1-1ba58400e121\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.336473 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvwcb\" (UniqueName: \"kubernetes.io/projected/4625af6e-56d3-4c1e-bac1-1ba58400e121-kube-api-access-mvwcb\") pod \"nova-scheduler-0\" (UID: \"4625af6e-56d3-4c1e-bac1-1ba58400e121\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.437648 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4625af6e-56d3-4c1e-bac1-1ba58400e121-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4625af6e-56d3-4c1e-bac1-1ba58400e121\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.437687 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvwcb\" (UniqueName: \"kubernetes.io/projected/4625af6e-56d3-4c1e-bac1-1ba58400e121-kube-api-access-mvwcb\") pod \"nova-scheduler-0\" (UID: \"4625af6e-56d3-4c1e-bac1-1ba58400e121\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.437744 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4625af6e-56d3-4c1e-bac1-1ba58400e121-config-data\") pod \"nova-scheduler-0\" (UID: \"4625af6e-56d3-4c1e-bac1-1ba58400e121\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.442546 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4625af6e-56d3-4c1e-bac1-1ba58400e121-config-data\") pod \"nova-scheduler-0\" (UID: \"4625af6e-56d3-4c1e-bac1-1ba58400e121\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.443611 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4625af6e-56d3-4c1e-bac1-1ba58400e121-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4625af6e-56d3-4c1e-bac1-1ba58400e121\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.454323 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvwcb\" (UniqueName: 
\"kubernetes.io/projected/4625af6e-56d3-4c1e-bac1-1ba58400e121-kube-api-access-mvwcb\") pod \"nova-scheduler-0\" (UID: \"4625af6e-56d3-4c1e-bac1-1ba58400e121\") " pod="openstack/nova-scheduler-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.513913 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.513973 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 21:43:55 crc kubenswrapper[4911]: I0929 21:43:55.601905 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.122845 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.127473 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:43:56 crc kubenswrapper[4911]: W0929 21:43:56.131988 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4625af6e_56d3_4c1e_bac1_1ba58400e121.slice/crio-5198e47a1cc442817c3fd3c9a8d05c9820fa5e0e863e8f6e9bc37c11c13432cf WatchSource:0}: Error finding container 5198e47a1cc442817c3fd3c9a8d05c9820fa5e0e863e8f6e9bc37c11c13432cf: Status 404 returned error can't find the container with id 5198e47a1cc442817c3fd3c9a8d05c9820fa5e0e863e8f6e9bc37c11c13432cf Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.151343 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-config-data\") pod \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.151387 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-logs\") pod \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.151519 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-combined-ca-bundle\") pod \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.151654 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brr78\" (UniqueName: \"kubernetes.io/projected/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-kube-api-access-brr78\") pod \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\" (UID: \"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb\") " Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.152252 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-logs" (OuterVolumeSpecName: "logs") pod "01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" (UID: "01ef4ed4-a76d-4896-9c3e-e62abb21d2cb"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.152641 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-logs\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.156927 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-kube-api-access-brr78" (OuterVolumeSpecName: "kube-api-access-brr78") pod "01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" (UID: "01ef4ed4-a76d-4896-9c3e-e62abb21d2cb"). InnerVolumeSpecName "kube-api-access-brr78". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.179889 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-config-data" (OuterVolumeSpecName: "config-data") pod "01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" (UID: "01ef4ed4-a76d-4896-9c3e-e62abb21d2cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.180828 4911 generic.go:334] "Generic (PLEG): container finished" podID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" containerID="090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072" exitCode=0 Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.180915 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb","Type":"ContainerDied","Data":"090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072"} Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.180944 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"01ef4ed4-a76d-4896-9c3e-e62abb21d2cb","Type":"ContainerDied","Data":"0226b37e9a1ad402d36b0204f6b1274cfe7b841d5bc0324a0be3b140f6ba1e33"} Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.180991 4911 scope.go:117] "RemoveContainer" containerID="090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.181037 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.187151 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f","Type":"ContainerStarted","Data":"4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4"} Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.187190 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f","Type":"ContainerStarted","Data":"7fa74fe9b6dc423f1e5ddfcd7bb33e14c6d4afd908d73fefaef29efbf30cc833"} Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.191236 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" (UID: "01ef4ed4-a76d-4896-9c3e-e62abb21d2cb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.192549 4911 generic.go:334] "Generic (PLEG): container finished" podID="50640abc-40db-4390-82d1-f3cfc76da71c" containerID="4a9c99b6ceab26bcf54375dc4957b5762f55e899af1d807a48454b472085e569" exitCode=0 Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.192605 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerDied","Data":"4a9c99b6ceab26bcf54375dc4957b5762f55e899af1d807a48454b472085e569"} Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.192622 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"134746612eaa8a9a73112d2bf779a2a7d4f9c664598301b1e0b9cd02784dad89"} Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.195970 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4625af6e-56d3-4c1e-bac1-1ba58400e121","Type":"ContainerStarted","Data":"5198e47a1cc442817c3fd3c9a8d05c9820fa5e0e863e8f6e9bc37c11c13432cf"} Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.221009 4911 scope.go:117] "RemoveContainer" containerID="c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.239992 4911 scope.go:117] "RemoveContainer" containerID="090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072" Sep 29 21:43:56 crc kubenswrapper[4911]: E0929 21:43:56.240567 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072\": container with ID starting with 090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072 not found: ID does not exist" containerID="090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.240616 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072"} err="failed to get container status \"090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072\": rpc error: code = NotFound desc = could not find container \"090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072\": container with ID starting with 090c00f2e7dad0c3d8266cb2b8839832fcb9aa37984f6d48891aaf53dab3e072 not found: ID does not exist" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.240639 4911 scope.go:117] "RemoveContainer" containerID="c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630" Sep 29 21:43:56 crc kubenswrapper[4911]: E0929 21:43:56.240910 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630\": container with ID starting with c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630 not found: ID does not exist" containerID="c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.240954 4911 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630"} err="failed to get container status \"c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630\": rpc error: code = NotFound desc = could not find container \"c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630\": container with ID starting with c80e174571862f4d87767d57130616550e7f6ea265f82cb7160fd3a001c8f630 not found: ID does not exist" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.240971 4911 scope.go:117] "RemoveContainer" containerID="5dec289dec4d2c01af51bffa09906f4044b26096f9186eb8b0d1b24f0055ec27" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.255641 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brr78\" (UniqueName: \"kubernetes.io/projected/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-kube-api-access-brr78\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.255671 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.255680 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.513029 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.524036 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.541689 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 21:43:56 crc kubenswrapper[4911]: E0929 21:43:56.542106 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" containerName="nova-api-log" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.542125 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" containerName="nova-api-log" Sep 29 21:43:56 crc kubenswrapper[4911]: E0929 21:43:56.542156 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" containerName="nova-api-api" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.542163 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" containerName="nova-api-api" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.542339 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" containerName="nova-api-log" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.542364 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" containerName="nova-api-api" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.543261 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.545651 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.557049 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.665032 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc7f4\" (UniqueName: \"kubernetes.io/projected/620be3e0-40aa-43f0-aa16-9a54e7910bf9-kube-api-access-gc7f4\") pod \"nova-api-0\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.665129 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/620be3e0-40aa-43f0-aa16-9a54e7910bf9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.665771 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/620be3e0-40aa-43f0-aa16-9a54e7910bf9-config-data\") pod \"nova-api-0\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.665874 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/620be3e0-40aa-43f0-aa16-9a54e7910bf9-logs\") pod \"nova-api-0\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.713020 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ef4ed4-a76d-4896-9c3e-e62abb21d2cb" path="/var/lib/kubelet/pods/01ef4ed4-a76d-4896-9c3e-e62abb21d2cb/volumes" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.713839 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72f12417-f4fb-494a-abca-37933bdb860d" path="/var/lib/kubelet/pods/72f12417-f4fb-494a-abca-37933bdb860d/volumes" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.767860 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/620be3e0-40aa-43f0-aa16-9a54e7910bf9-config-data\") pod \"nova-api-0\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.767920 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/620be3e0-40aa-43f0-aa16-9a54e7910bf9-logs\") pod \"nova-api-0\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.767962 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc7f4\" (UniqueName: \"kubernetes.io/projected/620be3e0-40aa-43f0-aa16-9a54e7910bf9-kube-api-access-gc7f4\") pod \"nova-api-0\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.768023 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/620be3e0-40aa-43f0-aa16-9a54e7910bf9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.770423 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/620be3e0-40aa-43f0-aa16-9a54e7910bf9-logs\") pod \"nova-api-0\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.774416 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/620be3e0-40aa-43f0-aa16-9a54e7910bf9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.779092 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/620be3e0-40aa-43f0-aa16-9a54e7910bf9-config-data\") pod \"nova-api-0\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.790696 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc7f4\" (UniqueName: \"kubernetes.io/projected/620be3e0-40aa-43f0-aa16-9a54e7910bf9-kube-api-access-gc7f4\") pod \"nova-api-0\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " pod="openstack/nova-api-0" Sep 29 21:43:56 crc kubenswrapper[4911]: I0929 21:43:56.867512 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:43:57 crc kubenswrapper[4911]: I0929 21:43:57.207681 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4625af6e-56d3-4c1e-bac1-1ba58400e121","Type":"ContainerStarted","Data":"536eb3ebe43fffa2e7fcb559b3c2271b8c722bf6e77c1c9dc7d5b845ee7bdc59"} Sep 29 21:43:57 crc kubenswrapper[4911]: I0929 21:43:57.210780 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f","Type":"ContainerStarted","Data":"d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4"} Sep 29 21:43:57 crc kubenswrapper[4911]: I0929 21:43:57.229766 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.229749196 podStartE2EDuration="2.229749196s" podCreationTimestamp="2025-09-29 21:43:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:43:57.221627153 +0000 UTC m=+1115.198739834" watchObservedRunningTime="2025-09-29 21:43:57.229749196 +0000 UTC m=+1115.206861877" Sep 29 21:43:57 crc kubenswrapper[4911]: I0929 21:43:57.332595 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:43:57 crc kubenswrapper[4911]: W0929 21:43:57.338075 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod620be3e0_40aa_43f0_aa16_9a54e7910bf9.slice/crio-171ae6d0ce4e80f3251494317255cb43c5e897eb6042faca0e50738b458fe0e2 WatchSource:0}: Error finding container 171ae6d0ce4e80f3251494317255cb43c5e897eb6042faca0e50738b458fe0e2: Status 404 returned error can't find the container with id 
171ae6d0ce4e80f3251494317255cb43c5e897eb6042faca0e50738b458fe0e2 Sep 29 21:43:58 crc kubenswrapper[4911]: I0929 21:43:58.231650 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f","Type":"ContainerStarted","Data":"cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4"} Sep 29 21:43:58 crc kubenswrapper[4911]: I0929 21:43:58.234106 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"620be3e0-40aa-43f0-aa16-9a54e7910bf9","Type":"ContainerStarted","Data":"a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e"} Sep 29 21:43:58 crc kubenswrapper[4911]: I0929 21:43:58.234181 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"620be3e0-40aa-43f0-aa16-9a54e7910bf9","Type":"ContainerStarted","Data":"dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e"} Sep 29 21:43:58 crc kubenswrapper[4911]: I0929 21:43:58.234201 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"620be3e0-40aa-43f0-aa16-9a54e7910bf9","Type":"ContainerStarted","Data":"171ae6d0ce4e80f3251494317255cb43c5e897eb6042faca0e50738b458fe0e2"} Sep 29 21:44:00 crc kubenswrapper[4911]: I0929 21:44:00.254574 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f","Type":"ContainerStarted","Data":"f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c"} Sep 29 21:44:00 crc kubenswrapper[4911]: I0929 21:44:00.255157 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 21:44:00 crc kubenswrapper[4911]: I0929 21:44:00.282022 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.282002305 podStartE2EDuration="4.282002305s" podCreationTimestamp="2025-09-29 21:43:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:43:58.263302976 +0000 UTC m=+1116.240415687" watchObservedRunningTime="2025-09-29 21:44:00.282002305 +0000 UTC m=+1118.259114976" Sep 29 21:44:00 crc kubenswrapper[4911]: I0929 21:44:00.283283 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.406972395 podStartE2EDuration="6.283278234s" podCreationTimestamp="2025-09-29 21:43:54 +0000 UTC" firstStartedPulling="2025-09-29 21:43:55.209339125 +0000 UTC m=+1113.186451796" lastFinishedPulling="2025-09-29 21:43:59.085644964 +0000 UTC m=+1117.062757635" observedRunningTime="2025-09-29 21:44:00.27639641 +0000 UTC m=+1118.253509081" watchObservedRunningTime="2025-09-29 21:44:00.283278234 +0000 UTC m=+1118.260390905" Sep 29 21:44:00 crc kubenswrapper[4911]: I0929 21:44:00.461497 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 29 21:44:00 crc kubenswrapper[4911]: I0929 21:44:00.514285 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 21:44:00 crc kubenswrapper[4911]: I0929 21:44:00.514348 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 21:44:00 crc kubenswrapper[4911]: I0929 21:44:00.603112 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-scheduler-0" Sep 29 21:44:01 crc kubenswrapper[4911]: I0929 21:44:01.481841 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 21:44:01 crc kubenswrapper[4911]: I0929 21:44:01.527896 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 21:44:01 crc kubenswrapper[4911]: I0929 21:44:01.527989 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 21:44:05 crc kubenswrapper[4911]: I0929 21:44:05.603558 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 21:44:05 crc kubenswrapper[4911]: I0929 21:44:05.655991 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 21:44:06 crc kubenswrapper[4911]: I0929 21:44:06.338562 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 21:44:06 crc kubenswrapper[4911]: I0929 21:44:06.867858 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 21:44:06 crc kubenswrapper[4911]: I0929 21:44:06.867917 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 21:44:07 crc kubenswrapper[4911]: I0929 21:44:07.950001 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 21:44:07 crc kubenswrapper[4911]: I0929 21:44:07.950302 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 21:44:10 crc kubenswrapper[4911]: I0929 21:44:10.524912 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 21:44:10 crc kubenswrapper[4911]: I0929 21:44:10.525420 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 21:44:10 crc kubenswrapper[4911]: I0929 21:44:10.539778 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 21:44:10 crc kubenswrapper[4911]: I0929 21:44:10.540835 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.385231 4911 generic.go:334] "Generic (PLEG): container finished" podID="5189e725-065a-4a70-8e07-4b19758add3c" containerID="84c9b8aa455c4b9d4dec7ff584f98682a4936de9f034bc182a3098fe1e82adac" exitCode=137 Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.385303 4911 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5189e725-065a-4a70-8e07-4b19758add3c","Type":"ContainerDied","Data":"84c9b8aa455c4b9d4dec7ff584f98682a4936de9f034bc182a3098fe1e82adac"} Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.385817 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5189e725-065a-4a70-8e07-4b19758add3c","Type":"ContainerDied","Data":"9c5f8988bfb591bd75bbe5ac8f886839ddb78a59b1836b626d0c2651ff61ec7a"} Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.385833 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c5f8988bfb591bd75bbe5ac8f886839ddb78a59b1836b626d0c2651ff61ec7a" Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.426653 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.527950 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p62s6\" (UniqueName: \"kubernetes.io/projected/5189e725-065a-4a70-8e07-4b19758add3c-kube-api-access-p62s6\") pod \"5189e725-065a-4a70-8e07-4b19758add3c\" (UID: \"5189e725-065a-4a70-8e07-4b19758add3c\") " Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.528028 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5189e725-065a-4a70-8e07-4b19758add3c-combined-ca-bundle\") pod \"5189e725-065a-4a70-8e07-4b19758add3c\" (UID: \"5189e725-065a-4a70-8e07-4b19758add3c\") " Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.528122 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5189e725-065a-4a70-8e07-4b19758add3c-config-data\") pod \"5189e725-065a-4a70-8e07-4b19758add3c\" (UID: \"5189e725-065a-4a70-8e07-4b19758add3c\") " Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.537088 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5189e725-065a-4a70-8e07-4b19758add3c-kube-api-access-p62s6" (OuterVolumeSpecName: "kube-api-access-p62s6") pod "5189e725-065a-4a70-8e07-4b19758add3c" (UID: "5189e725-065a-4a70-8e07-4b19758add3c"). InnerVolumeSpecName "kube-api-access-p62s6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.556646 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5189e725-065a-4a70-8e07-4b19758add3c-config-data" (OuterVolumeSpecName: "config-data") pod "5189e725-065a-4a70-8e07-4b19758add3c" (UID: "5189e725-065a-4a70-8e07-4b19758add3c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.593194 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5189e725-065a-4a70-8e07-4b19758add3c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5189e725-065a-4a70-8e07-4b19758add3c" (UID: "5189e725-065a-4a70-8e07-4b19758add3c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.631041 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p62s6\" (UniqueName: \"kubernetes.io/projected/5189e725-065a-4a70-8e07-4b19758add3c-kube-api-access-p62s6\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.631089 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5189e725-065a-4a70-8e07-4b19758add3c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:14 crc kubenswrapper[4911]: I0929 21:44:14.631107 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5189e725-065a-4a70-8e07-4b19758add3c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.394997 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.419748 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.435718 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.453889 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 21:44:15 crc kubenswrapper[4911]: E0929 21:44:15.454364 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5189e725-065a-4a70-8e07-4b19758add3c" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.454380 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5189e725-065a-4a70-8e07-4b19758add3c" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.454629 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="5189e725-065a-4a70-8e07-4b19758add3c" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.455307 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.457543 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.459255 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.459451 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.466381 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.549069 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnzmp\" (UniqueName: \"kubernetes.io/projected/752cc0da-4c33-4e13-9dbc-e7034e072533-kube-api-access-vnzmp\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.549128 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/752cc0da-4c33-4e13-9dbc-e7034e072533-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.549351 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/752cc0da-4c33-4e13-9dbc-e7034e072533-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.549406 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/752cc0da-4c33-4e13-9dbc-e7034e072533-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.549682 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/752cc0da-4c33-4e13-9dbc-e7034e072533-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.651124 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/752cc0da-4c33-4e13-9dbc-e7034e072533-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.651173 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/752cc0da-4c33-4e13-9dbc-e7034e072533-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " 
pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.651252 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/752cc0da-4c33-4e13-9dbc-e7034e072533-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.651302 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnzmp\" (UniqueName: \"kubernetes.io/projected/752cc0da-4c33-4e13-9dbc-e7034e072533-kube-api-access-vnzmp\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.651338 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/752cc0da-4c33-4e13-9dbc-e7034e072533-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.655674 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/752cc0da-4c33-4e13-9dbc-e7034e072533-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.656288 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/752cc0da-4c33-4e13-9dbc-e7034e072533-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.657183 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/752cc0da-4c33-4e13-9dbc-e7034e072533-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.658500 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/752cc0da-4c33-4e13-9dbc-e7034e072533-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.669249 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnzmp\" (UniqueName: \"kubernetes.io/projected/752cc0da-4c33-4e13-9dbc-e7034e072533-kube-api-access-vnzmp\") pod \"nova-cell1-novncproxy-0\" (UID: \"752cc0da-4c33-4e13-9dbc-e7034e072533\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:15 crc kubenswrapper[4911]: I0929 21:44:15.772503 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:16 crc kubenswrapper[4911]: I0929 21:44:16.272765 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 21:44:16 crc kubenswrapper[4911]: I0929 21:44:16.408437 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"752cc0da-4c33-4e13-9dbc-e7034e072533","Type":"ContainerStarted","Data":"f4e7b529c09817e41b91242becde5bfa7d56c7a970d2ad342ed82f4549a75bcb"} Sep 29 21:44:16 crc kubenswrapper[4911]: I0929 21:44:16.712518 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5189e725-065a-4a70-8e07-4b19758add3c" path="/var/lib/kubelet/pods/5189e725-065a-4a70-8e07-4b19758add3c/volumes" Sep 29 21:44:16 crc kubenswrapper[4911]: I0929 21:44:16.873496 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 21:44:16 crc kubenswrapper[4911]: I0929 21:44:16.873565 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 21:44:16 crc kubenswrapper[4911]: I0929 21:44:16.874061 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 21:44:16 crc kubenswrapper[4911]: I0929 21:44:16.874111 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 21:44:16 crc kubenswrapper[4911]: I0929 21:44:16.878241 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 21:44:16 crc kubenswrapper[4911]: I0929 21:44:16.880007 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.108314 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-j2scw"] Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.110516 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.146490 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-j2scw"] Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.176939 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdlz6\" (UniqueName: \"kubernetes.io/projected/037d778b-c252-409b-9b11-0bd0911d7d4f-kube-api-access-bdlz6\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.177035 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.177085 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.177115 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.177148 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-config\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.177202 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.279254 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-config\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.279360 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.279410 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-bdlz6\" (UniqueName: \"kubernetes.io/projected/037d778b-c252-409b-9b11-0bd0911d7d4f-kube-api-access-bdlz6\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.279443 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.279489 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.279516 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.280252 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.280379 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.280528 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.280905 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-config\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.281158 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.299806 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdlz6\" (UniqueName: 
\"kubernetes.io/projected/037d778b-c252-409b-9b11-0bd0911d7d4f-kube-api-access-bdlz6\") pod \"dnsmasq-dns-59cf4bdb65-j2scw\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.416880 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"752cc0da-4c33-4e13-9dbc-e7034e072533","Type":"ContainerStarted","Data":"cf1e0a41431c797fbbe11a7e48297a84f3bf05c6772a45140fd902ad0064fa87"} Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.438284 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.438268931 podStartE2EDuration="2.438268931s" podCreationTimestamp="2025-09-29 21:44:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:44:17.433395329 +0000 UTC m=+1135.410508020" watchObservedRunningTime="2025-09-29 21:44:17.438268931 +0000 UTC m=+1135.415381602" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.452428 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:17 crc kubenswrapper[4911]: I0929 21:44:17.942006 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-j2scw"] Sep 29 21:44:17 crc kubenswrapper[4911]: W0929 21:44:17.946028 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod037d778b_c252_409b_9b11_0bd0911d7d4f.slice/crio-06942fd91df31dba7c9922582f94c62b85abbe26286a096b2561da6ac1c3524f WatchSource:0}: Error finding container 06942fd91df31dba7c9922582f94c62b85abbe26286a096b2561da6ac1c3524f: Status 404 returned error can't find the container with id 06942fd91df31dba7c9922582f94c62b85abbe26286a096b2561da6ac1c3524f Sep 29 21:44:18 crc kubenswrapper[4911]: I0929 21:44:18.427606 4911 generic.go:334] "Generic (PLEG): container finished" podID="037d778b-c252-409b-9b11-0bd0911d7d4f" containerID="32003fac499ee3408b751a04f57a209711e9161bd58b8fee319d9fc3e6ec835c" exitCode=0 Sep 29 21:44:18 crc kubenswrapper[4911]: I0929 21:44:18.427828 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" event={"ID":"037d778b-c252-409b-9b11-0bd0911d7d4f","Type":"ContainerDied","Data":"32003fac499ee3408b751a04f57a209711e9161bd58b8fee319d9fc3e6ec835c"} Sep 29 21:44:18 crc kubenswrapper[4911]: I0929 21:44:18.428904 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" event={"ID":"037d778b-c252-409b-9b11-0bd0911d7d4f","Type":"ContainerStarted","Data":"06942fd91df31dba7c9922582f94c62b85abbe26286a096b2561da6ac1c3524f"} Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.246544 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.247590 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="ceilometer-central-agent" containerID="cri-o://4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4" gracePeriod=30 Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.247656 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="sg-core" containerID="cri-o://cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4" gracePeriod=30 Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.247656 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="ceilometer-notification-agent" containerID="cri-o://d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4" gracePeriod=30 Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.247599 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="proxy-httpd" containerID="cri-o://f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c" gracePeriod=30 Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.258414 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.195:3000/\": EOF" Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.438536 4911 generic.go:334] "Generic (PLEG): container finished" podID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerID="cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4" exitCode=2 Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.438598 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f","Type":"ContainerDied","Data":"cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4"} Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.441011 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" event={"ID":"037d778b-c252-409b-9b11-0bd0911d7d4f","Type":"ContainerStarted","Data":"c7917b7026a466b130b34fd319d1181f8256315131c83c1d784faeccd99a5b8d"} Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.442206 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.465052 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" podStartSLOduration=2.46503104 podStartE2EDuration="2.46503104s" podCreationTimestamp="2025-09-29 21:44:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:44:19.460731775 +0000 UTC m=+1137.437844476" watchObservedRunningTime="2025-09-29 21:44:19.46503104 +0000 UTC m=+1137.442143721" Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.493876 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.494389 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" containerName="nova-api-log" containerID="cri-o://dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e" gracePeriod=30 Sep 29 21:44:19 crc kubenswrapper[4911]: I0929 21:44:19.494534 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" containerName="nova-api-api" 
containerID="cri-o://a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e" gracePeriod=30 Sep 29 21:44:20 crc kubenswrapper[4911]: I0929 21:44:20.454953 4911 generic.go:334] "Generic (PLEG): container finished" podID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" containerID="dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e" exitCode=143 Sep 29 21:44:20 crc kubenswrapper[4911]: I0929 21:44:20.455034 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"620be3e0-40aa-43f0-aa16-9a54e7910bf9","Type":"ContainerDied","Data":"dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e"} Sep 29 21:44:20 crc kubenswrapper[4911]: I0929 21:44:20.465772 4911 generic.go:334] "Generic (PLEG): container finished" podID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerID="f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c" exitCode=0 Sep 29 21:44:20 crc kubenswrapper[4911]: I0929 21:44:20.465848 4911 generic.go:334] "Generic (PLEG): container finished" podID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerID="4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4" exitCode=0 Sep 29 21:44:20 crc kubenswrapper[4911]: I0929 21:44:20.466659 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f","Type":"ContainerDied","Data":"f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c"} Sep 29 21:44:20 crc kubenswrapper[4911]: I0929 21:44:20.466700 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f","Type":"ContainerDied","Data":"4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4"} Sep 29 21:44:20 crc kubenswrapper[4911]: I0929 21:44:20.772618 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.190146 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.290023 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.319188 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/620be3e0-40aa-43f0-aa16-9a54e7910bf9-combined-ca-bundle\") pod \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.319673 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc7f4\" (UniqueName: \"kubernetes.io/projected/620be3e0-40aa-43f0-aa16-9a54e7910bf9-kube-api-access-gc7f4\") pod \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.319916 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/620be3e0-40aa-43f0-aa16-9a54e7910bf9-logs\") pod \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.320167 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/620be3e0-40aa-43f0-aa16-9a54e7910bf9-config-data\") pod \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\" (UID: \"620be3e0-40aa-43f0-aa16-9a54e7910bf9\") " Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.320778 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/620be3e0-40aa-43f0-aa16-9a54e7910bf9-logs" (OuterVolumeSpecName: "logs") pod "620be3e0-40aa-43f0-aa16-9a54e7910bf9" (UID: "620be3e0-40aa-43f0-aa16-9a54e7910bf9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.325257 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/620be3e0-40aa-43f0-aa16-9a54e7910bf9-kube-api-access-gc7f4" (OuterVolumeSpecName: "kube-api-access-gc7f4") pod "620be3e0-40aa-43f0-aa16-9a54e7910bf9" (UID: "620be3e0-40aa-43f0-aa16-9a54e7910bf9"). InnerVolumeSpecName "kube-api-access-gc7f4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.345136 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/620be3e0-40aa-43f0-aa16-9a54e7910bf9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "620be3e0-40aa-43f0-aa16-9a54e7910bf9" (UID: "620be3e0-40aa-43f0-aa16-9a54e7910bf9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.359581 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/620be3e0-40aa-43f0-aa16-9a54e7910bf9-config-data" (OuterVolumeSpecName: "config-data") pod "620be3e0-40aa-43f0-aa16-9a54e7910bf9" (UID: "620be3e0-40aa-43f0-aa16-9a54e7910bf9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.423477 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-combined-ca-bundle\") pod \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.423600 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-config-data\") pod \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.423637 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-log-httpd\") pod \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.423667 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-ceilometer-tls-certs\") pod \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.423747 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phlgf\" (UniqueName: \"kubernetes.io/projected/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-kube-api-access-phlgf\") pod \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.423810 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-scripts\") pod \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.423885 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-run-httpd\") pod \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.423929 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-sg-core-conf-yaml\") pod \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\" (UID: \"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f\") " Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.424273 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/620be3e0-40aa-43f0-aa16-9a54e7910bf9-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.424312 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/620be3e0-40aa-43f0-aa16-9a54e7910bf9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.424324 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc7f4\" 
(UniqueName: \"kubernetes.io/projected/620be3e0-40aa-43f0-aa16-9a54e7910bf9-kube-api-access-gc7f4\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.424334 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/620be3e0-40aa-43f0-aa16-9a54e7910bf9-logs\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.425200 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" (UID: "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.425483 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" (UID: "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.429145 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-kube-api-access-phlgf" (OuterVolumeSpecName: "kube-api-access-phlgf") pod "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" (UID: "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f"). InnerVolumeSpecName "kube-api-access-phlgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.431125 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-scripts" (OuterVolumeSpecName: "scripts") pod "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" (UID: "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.455062 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" (UID: "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.475042 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" (UID: "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.495425 4911 generic.go:334] "Generic (PLEG): container finished" podID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" containerID="a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e" exitCode=0 Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.495533 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.495725 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"620be3e0-40aa-43f0-aa16-9a54e7910bf9","Type":"ContainerDied","Data":"a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e"} Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.495881 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"620be3e0-40aa-43f0-aa16-9a54e7910bf9","Type":"ContainerDied","Data":"171ae6d0ce4e80f3251494317255cb43c5e897eb6042faca0e50738b458fe0e2"} Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.495966 4911 scope.go:117] "RemoveContainer" containerID="a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.499131 4911 generic.go:334] "Generic (PLEG): container finished" podID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerID="d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4" exitCode=0 Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.499170 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f","Type":"ContainerDied","Data":"d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4"} Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.499229 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f0e47d7a-2e01-4312-a5ac-e50cf19dc83f","Type":"ContainerDied","Data":"7fa74fe9b6dc423f1e5ddfcd7bb33e14c6d4afd908d73fefaef29efbf30cc833"} Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.499884 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.503765 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" (UID: "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.529561 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.529597 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.529612 4911 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.529625 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phlgf\" (UniqueName: \"kubernetes.io/projected/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-kube-api-access-phlgf\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.529636 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.529648 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.529681 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.534396 4911 scope.go:117] "RemoveContainer" containerID="dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.551242 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.562118 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-config-data" (OuterVolumeSpecName: "config-data") pod "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" (UID: "f0e47d7a-2e01-4312-a5ac-e50cf19dc83f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.567076 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.573268 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 21:44:23 crc kubenswrapper[4911]: E0929 21:44:23.573935 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="ceilometer-central-agent" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.573954 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="ceilometer-central-agent" Sep 29 21:44:23 crc kubenswrapper[4911]: E0929 21:44:23.573976 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" containerName="nova-api-api" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.573987 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" containerName="nova-api-api" Sep 29 21:44:23 crc kubenswrapper[4911]: E0929 21:44:23.574011 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="proxy-httpd" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.574022 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="proxy-httpd" Sep 29 21:44:23 crc kubenswrapper[4911]: E0929 21:44:23.574046 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" containerName="nova-api-log" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.574057 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" containerName="nova-api-log" Sep 29 21:44:23 crc kubenswrapper[4911]: E0929 21:44:23.574075 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="sg-core" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.574087 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="sg-core" Sep 29 21:44:23 crc kubenswrapper[4911]: E0929 21:44:23.574114 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="ceilometer-notification-agent" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.574126 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="ceilometer-notification-agent" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.574423 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" containerName="nova-api-api" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.574445 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="ceilometer-central-agent" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.574465 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="ceilometer-notification-agent" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.574486 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" 
containerName="nova-api-log" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.574504 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="sg-core" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.574525 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" containerName="proxy-httpd" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.576158 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.579767 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.579987 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.579998 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.582471 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.587311 4911 scope.go:117] "RemoveContainer" containerID="a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e" Sep 29 21:44:23 crc kubenswrapper[4911]: E0929 21:44:23.588304 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e\": container with ID starting with a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e not found: ID does not exist" containerID="a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.588372 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e"} err="failed to get container status \"a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e\": rpc error: code = NotFound desc = could not find container \"a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e\": container with ID starting with a3cfda70a18c85f5908ff7c373064046ea0644133e3980fc5e0e203a924f445e not found: ID does not exist" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.588394 4911 scope.go:117] "RemoveContainer" containerID="dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e" Sep 29 21:44:23 crc kubenswrapper[4911]: E0929 21:44:23.594231 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e\": container with ID starting with dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e not found: ID does not exist" containerID="dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.594279 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e"} err="failed to get container status \"dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e\": rpc error: code = NotFound desc = could not find container 
\"dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e\": container with ID starting with dd6ca6948de14449ed846ae82303dee5b08744fa4f6de42040c729bd3792855e not found: ID does not exist" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.594308 4911 scope.go:117] "RemoveContainer" containerID="f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.626995 4911 scope.go:117] "RemoveContainer" containerID="cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.631012 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvcnp\" (UniqueName: \"kubernetes.io/projected/1e871a2c-75f5-4449-8324-84a7ed255bf2-kube-api-access-tvcnp\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.631048 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-public-tls-certs\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.631167 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e871a2c-75f5-4449-8324-84a7ed255bf2-logs\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.631250 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-config-data\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.631294 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.631347 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.631442 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.650592 4911 scope.go:117] "RemoveContainer" containerID="d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.674299 4911 scope.go:117] "RemoveContainer" containerID="4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.692647 4911 scope.go:117] "RemoveContainer" 
containerID="f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c" Sep 29 21:44:23 crc kubenswrapper[4911]: E0929 21:44:23.693603 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c\": container with ID starting with f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c not found: ID does not exist" containerID="f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.693635 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c"} err="failed to get container status \"f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c\": rpc error: code = NotFound desc = could not find container \"f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c\": container with ID starting with f19d7cad8f8fc1541c097578af91ce3ba1fc2366c4558bae77b327014268349c not found: ID does not exist" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.693658 4911 scope.go:117] "RemoveContainer" containerID="cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4" Sep 29 21:44:23 crc kubenswrapper[4911]: E0929 21:44:23.694027 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4\": container with ID starting with cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4 not found: ID does not exist" containerID="cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.694069 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4"} err="failed to get container status \"cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4\": rpc error: code = NotFound desc = could not find container \"cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4\": container with ID starting with cd013e7fb578c8ebd09fb39657f183347c5d1c2738b7a07dc772144392e2f5c4 not found: ID does not exist" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.694097 4911 scope.go:117] "RemoveContainer" containerID="d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4" Sep 29 21:44:23 crc kubenswrapper[4911]: E0929 21:44:23.694320 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4\": container with ID starting with d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4 not found: ID does not exist" containerID="d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.694364 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4"} err="failed to get container status \"d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4\": rpc error: code = NotFound desc = could not find container \"d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4\": container with ID starting with 
d126d151934ac7ee9bd8170490ea53bc016f3c464b7e08868aa4f6adcde9e8d4 not found: ID does not exist" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.694381 4911 scope.go:117] "RemoveContainer" containerID="4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4" Sep 29 21:44:23 crc kubenswrapper[4911]: E0929 21:44:23.694602 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4\": container with ID starting with 4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4 not found: ID does not exist" containerID="4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.694625 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4"} err="failed to get container status \"4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4\": rpc error: code = NotFound desc = could not find container \"4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4\": container with ID starting with 4886d129aa8fe63ae3936ff0f109c24e3a458eaa8c023a27356ccaaebb9dc8f4 not found: ID does not exist" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.733784 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e871a2c-75f5-4449-8324-84a7ed255bf2-logs\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.733868 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-config-data\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.733898 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.733931 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.733970 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvcnp\" (UniqueName: \"kubernetes.io/projected/1e871a2c-75f5-4449-8324-84a7ed255bf2-kube-api-access-tvcnp\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.733989 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-public-tls-certs\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.734328 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e871a2c-75f5-4449-8324-84a7ed255bf2-logs\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.740018 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-config-data\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.740204 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.742384 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.742744 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-public-tls-certs\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.752498 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvcnp\" (UniqueName: \"kubernetes.io/projected/1e871a2c-75f5-4449-8324-84a7ed255bf2-kube-api-access-tvcnp\") pod \"nova-api-0\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.843321 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.856136 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.866533 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.869774 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.872308 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.873496 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.873737 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.884268 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.898515 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.939311 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.939408 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsx7g\" (UniqueName: \"kubernetes.io/projected/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-kube-api-access-tsx7g\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.939461 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-run-httpd\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.939497 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-log-httpd\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.939594 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.939686 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-config-data\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.939944 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-scripts\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:23 crc kubenswrapper[4911]: I0929 21:44:23.940011 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.042662 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsx7g\" (UniqueName: \"kubernetes.io/projected/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-kube-api-access-tsx7g\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.043038 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-run-httpd\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.043099 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-log-httpd\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.043130 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.043192 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-config-data\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.043508 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-scripts\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.043536 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.043585 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.043597 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-run-httpd\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.044198 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-log-httpd\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.048729 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.049247 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-config-data\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.049963 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-scripts\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.050271 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.062454 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.062912 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsx7g\" (UniqueName: \"kubernetes.io/projected/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-kube-api-access-tsx7g\") pod \"ceilometer-0\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.212707 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.342427 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:44:24 crc kubenswrapper[4911]: W0929 21:44:24.348418 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e871a2c_75f5_4449_8324_84a7ed255bf2.slice/crio-8436265b1add2b9f3fd1c3599d311e258f72b014851893b20cdbeef936bfff3d WatchSource:0}: Error finding container 8436265b1add2b9f3fd1c3599d311e258f72b014851893b20cdbeef936bfff3d: Status 404 returned error can't find the container with id 8436265b1add2b9f3fd1c3599d311e258f72b014851893b20cdbeef936bfff3d Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.511052 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1e871a2c-75f5-4449-8324-84a7ed255bf2","Type":"ContainerStarted","Data":"8436265b1add2b9f3fd1c3599d311e258f72b014851893b20cdbeef936bfff3d"} Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.698652 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.711920 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.724048 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="620be3e0-40aa-43f0-aa16-9a54e7910bf9" path="/var/lib/kubelet/pods/620be3e0-40aa-43f0-aa16-9a54e7910bf9/volumes" Sep 29 21:44:24 crc kubenswrapper[4911]: I0929 21:44:24.724656 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0e47d7a-2e01-4312-a5ac-e50cf19dc83f" path="/var/lib/kubelet/pods/f0e47d7a-2e01-4312-a5ac-e50cf19dc83f/volumes" Sep 29 
21:44:25 crc kubenswrapper[4911]: I0929 21:44:25.525613 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7","Type":"ContainerStarted","Data":"273d75f7624d8845f71b4dfc3cef007bb1fc5786c51a91b92e6cf6d1d3463739"} Sep 29 21:44:25 crc kubenswrapper[4911]: I0929 21:44:25.526082 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7","Type":"ContainerStarted","Data":"e506de0974e98186f7421c88fdf6360fbed54cb7de5ffc4e62239dcb668b400e"} Sep 29 21:44:25 crc kubenswrapper[4911]: I0929 21:44:25.527753 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1e871a2c-75f5-4449-8324-84a7ed255bf2","Type":"ContainerStarted","Data":"d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c"} Sep 29 21:44:25 crc kubenswrapper[4911]: I0929 21:44:25.527816 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1e871a2c-75f5-4449-8324-84a7ed255bf2","Type":"ContainerStarted","Data":"1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922"} Sep 29 21:44:25 crc kubenswrapper[4911]: I0929 21:44:25.549399 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.54938184 podStartE2EDuration="2.54938184s" podCreationTimestamp="2025-09-29 21:44:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:44:25.546848622 +0000 UTC m=+1143.523961293" watchObservedRunningTime="2025-09-29 21:44:25.54938184 +0000 UTC m=+1143.526494511" Sep 29 21:44:25 crc kubenswrapper[4911]: I0929 21:44:25.773517 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:25 crc kubenswrapper[4911]: I0929 21:44:25.795518 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.539742 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7","Type":"ContainerStarted","Data":"9f4d06a638604e97771791a805949518fe058a028ea83e723afa00eb779a5ebc"} Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.572033 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.769787 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-xfpnw"] Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.772854 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.776620 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.776832 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.790597 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-xfpnw"] Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.894856 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-config-data\") pod \"nova-cell1-cell-mapping-xfpnw\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") " pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.895312 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njs49\" (UniqueName: \"kubernetes.io/projected/8feb84b9-2d26-49c2-b4cd-6504c42773e6-kube-api-access-njs49\") pod \"nova-cell1-cell-mapping-xfpnw\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") " pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.895460 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-scripts\") pod \"nova-cell1-cell-mapping-xfpnw\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") " pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.895681 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-xfpnw\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") " pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.997018 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-config-data\") pod \"nova-cell1-cell-mapping-xfpnw\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") " pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.997095 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njs49\" (UniqueName: \"kubernetes.io/projected/8feb84b9-2d26-49c2-b4cd-6504c42773e6-kube-api-access-njs49\") pod \"nova-cell1-cell-mapping-xfpnw\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") " pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.997127 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-scripts\") pod \"nova-cell1-cell-mapping-xfpnw\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") " pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:26 crc kubenswrapper[4911]: I0929 21:44:26.997175 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-xfpnw\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") " pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:27 crc kubenswrapper[4911]: I0929 21:44:27.001725 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-config-data\") pod \"nova-cell1-cell-mapping-xfpnw\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") " pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:27 crc kubenswrapper[4911]: I0929 21:44:27.001872 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-scripts\") pod \"nova-cell1-cell-mapping-xfpnw\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") " pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:27 crc kubenswrapper[4911]: I0929 21:44:27.002631 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-xfpnw\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") " pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:27 crc kubenswrapper[4911]: I0929 21:44:27.016268 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njs49\" (UniqueName: \"kubernetes.io/projected/8feb84b9-2d26-49c2-b4cd-6504c42773e6-kube-api-access-njs49\") pod \"nova-cell1-cell-mapping-xfpnw\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") " pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:27 crc kubenswrapper[4911]: I0929 21:44:27.106586 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xfpnw" Sep 29 21:44:27 crc kubenswrapper[4911]: I0929 21:44:27.454716 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:44:27 crc kubenswrapper[4911]: I0929 21:44:27.512426 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-2srf4"] Sep 29 21:44:27 crc kubenswrapper[4911]: I0929 21:44:27.512689 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" podUID="3046cee5-d66c-4a66-bed7-2f4a36df4113" containerName="dnsmasq-dns" containerID="cri-o://c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f" gracePeriod=10 Sep 29 21:44:27 crc kubenswrapper[4911]: I0929 21:44:27.562062 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7","Type":"ContainerStarted","Data":"eff336afb6da0f21d3ae1eadfaaa8ab3146491a84692faeaabdd537ff2a2e985"} Sep 29 21:44:27 crc kubenswrapper[4911]: I0929 21:44:27.612947 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-xfpnw"] Sep 29 21:44:27 crc kubenswrapper[4911]: I0929 21:44:27.936412 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.020691 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-ovsdbserver-nb\") pod \"3046cee5-d66c-4a66-bed7-2f4a36df4113\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.020753 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-dns-swift-storage-0\") pod \"3046cee5-d66c-4a66-bed7-2f4a36df4113\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.020861 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-ovsdbserver-sb\") pod \"3046cee5-d66c-4a66-bed7-2f4a36df4113\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.020881 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjvjx\" (UniqueName: \"kubernetes.io/projected/3046cee5-d66c-4a66-bed7-2f4a36df4113-kube-api-access-bjvjx\") pod \"3046cee5-d66c-4a66-bed7-2f4a36df4113\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.021016 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-dns-svc\") pod \"3046cee5-d66c-4a66-bed7-2f4a36df4113\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.021082 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-config\") pod \"3046cee5-d66c-4a66-bed7-2f4a36df4113\" (UID: \"3046cee5-d66c-4a66-bed7-2f4a36df4113\") " Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.029991 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3046cee5-d66c-4a66-bed7-2f4a36df4113-kube-api-access-bjvjx" (OuterVolumeSpecName: "kube-api-access-bjvjx") pod "3046cee5-d66c-4a66-bed7-2f4a36df4113" (UID: "3046cee5-d66c-4a66-bed7-2f4a36df4113"). InnerVolumeSpecName "kube-api-access-bjvjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.100871 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3046cee5-d66c-4a66-bed7-2f4a36df4113" (UID: "3046cee5-d66c-4a66-bed7-2f4a36df4113"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.104289 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3046cee5-d66c-4a66-bed7-2f4a36df4113" (UID: "3046cee5-d66c-4a66-bed7-2f4a36df4113"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.118170 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-config" (OuterVolumeSpecName: "config") pod "3046cee5-d66c-4a66-bed7-2f4a36df4113" (UID: "3046cee5-d66c-4a66-bed7-2f4a36df4113"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.123457 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.123492 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjvjx\" (UniqueName: \"kubernetes.io/projected/3046cee5-d66c-4a66-bed7-2f4a36df4113-kube-api-access-bjvjx\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.123508 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.123520 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.139416 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3046cee5-d66c-4a66-bed7-2f4a36df4113" (UID: "3046cee5-d66c-4a66-bed7-2f4a36df4113"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.158471 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3046cee5-d66c-4a66-bed7-2f4a36df4113" (UID: "3046cee5-d66c-4a66-bed7-2f4a36df4113"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.224986 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.225017 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3046cee5-d66c-4a66-bed7-2f4a36df4113-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.570451 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7","Type":"ContainerStarted","Data":"5cb5e22da0b1c37229ae817713095476a189fb658efca8fd730103319efff137"} Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.571573 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.573339 4911 generic.go:334] "Generic (PLEG): container finished" podID="3046cee5-d66c-4a66-bed7-2f4a36df4113" containerID="c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f" exitCode=0 Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.573379 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" event={"ID":"3046cee5-d66c-4a66-bed7-2f4a36df4113","Type":"ContainerDied","Data":"c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f"} Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.573398 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-2srf4" event={"ID":"3046cee5-d66c-4a66-bed7-2f4a36df4113","Type":"ContainerDied","Data":"0af6f1781c8ae1bc7264ff5db1ce6b25a27d0c6617ec83387794708d00bc722e"} Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.573414 4911 scope.go:117] "RemoveContainer" containerID="c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f" Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.573513 4911 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.580814 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xfpnw" event={"ID":"8feb84b9-2d26-49c2-b4cd-6504c42773e6","Type":"ContainerStarted","Data":"842ee730a19ce701299917a4cd6d7897718662ab5879e435a4ae7295af1993db"}
Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.580847 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xfpnw" event={"ID":"8feb84b9-2d26-49c2-b4cd-6504c42773e6","Type":"ContainerStarted","Data":"19e14849c8b977000e98227191226b7c8f4d719d2c81f0eb6a55b07036ffdbef"}
Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.601968 4911 scope.go:117] "RemoveContainer" containerID="263923d068d9cf0997a5cbbb0415219e28ee27d137c5bc4e826fd3c3d361a89b"
Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.622578 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.268288045 podStartE2EDuration="5.622556089s" podCreationTimestamp="2025-09-29 21:44:23 +0000 UTC" firstStartedPulling="2025-09-29 21:44:24.711730438 +0000 UTC m=+1142.688843099" lastFinishedPulling="2025-09-29 21:44:28.065998472 +0000 UTC m=+1146.043111143" observedRunningTime="2025-09-29 21:44:28.597829041 +0000 UTC m=+1146.574941742" watchObservedRunningTime="2025-09-29 21:44:28.622556089 +0000 UTC m=+1146.599668760"
Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.641574 4911 scope.go:117] "RemoveContainer" containerID="c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f"
Sep 29 21:44:28 crc kubenswrapper[4911]: E0929 21:44:28.643176 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f\": container with ID starting with c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f not found: ID does not exist" containerID="c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f"
Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.643212 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f"} err="failed to get container status \"c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f\": rpc error: code = NotFound desc = could not find container \"c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f\": container with ID starting with c5694d357d722d62651238ed887242d75cb687ab76e074f09f99103f990d312f not found: ID does not exist"
Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.643240 4911 scope.go:117] "RemoveContainer" containerID="263923d068d9cf0997a5cbbb0415219e28ee27d137c5bc4e826fd3c3d361a89b"
Sep 29 21:44:28 crc kubenswrapper[4911]: E0929 21:44:28.643508 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"263923d068d9cf0997a5cbbb0415219e28ee27d137c5bc4e826fd3c3d361a89b\": container with ID starting with 263923d068d9cf0997a5cbbb0415219e28ee27d137c5bc4e826fd3c3d361a89b not found: ID does not exist" containerID="263923d068d9cf0997a5cbbb0415219e28ee27d137c5bc4e826fd3c3d361a89b"
Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.643535 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"263923d068d9cf0997a5cbbb0415219e28ee27d137c5bc4e826fd3c3d361a89b"} err="failed to get container status \"263923d068d9cf0997a5cbbb0415219e28ee27d137c5bc4e826fd3c3d361a89b\": rpc error: code = NotFound desc = could not find container \"263923d068d9cf0997a5cbbb0415219e28ee27d137c5bc4e826fd3c3d361a89b\": container with ID starting with 263923d068d9cf0997a5cbbb0415219e28ee27d137c5bc4e826fd3c3d361a89b not found: ID does not exist"
Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.644331 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-xfpnw" podStartSLOduration=2.6443126550000002 podStartE2EDuration="2.644312655s" podCreationTimestamp="2025-09-29 21:44:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:44:28.618436411 +0000 UTC m=+1146.595549112" watchObservedRunningTime="2025-09-29 21:44:28.644312655 +0000 UTC m=+1146.621425326"
Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.655673 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-2srf4"]
Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.665684 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-2srf4"]
Sep 29 21:44:28 crc kubenswrapper[4911]: I0929 21:44:28.714637 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3046cee5-d66c-4a66-bed7-2f4a36df4113" path="/var/lib/kubelet/pods/3046cee5-d66c-4a66-bed7-2f4a36df4113/volumes"
Sep 29 21:44:32 crc kubenswrapper[4911]: I0929 21:44:32.636727 4911 generic.go:334] "Generic (PLEG): container finished" podID="8feb84b9-2d26-49c2-b4cd-6504c42773e6" containerID="842ee730a19ce701299917a4cd6d7897718662ab5879e435a4ae7295af1993db" exitCode=0
Sep 29 21:44:32 crc kubenswrapper[4911]: I0929 21:44:32.637322 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xfpnw" event={"ID":"8feb84b9-2d26-49c2-b4cd-6504c42773e6","Type":"ContainerDied","Data":"842ee730a19ce701299917a4cd6d7897718662ab5879e435a4ae7295af1993db"}
Sep 29 21:44:33 crc kubenswrapper[4911]: I0929 21:44:33.911396 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 29 21:44:33 crc kubenswrapper[4911]: I0929 21:44:33.912034 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.124438 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xfpnw"
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.272480 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-config-data\") pod \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") "
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.272626 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-combined-ca-bundle\") pod \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") "
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.272689 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-scripts\") pod \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") "
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.272811 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njs49\" (UniqueName: \"kubernetes.io/projected/8feb84b9-2d26-49c2-b4cd-6504c42773e6-kube-api-access-njs49\") pod \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\" (UID: \"8feb84b9-2d26-49c2-b4cd-6504c42773e6\") "
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.282308 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8feb84b9-2d26-49c2-b4cd-6504c42773e6-kube-api-access-njs49" (OuterVolumeSpecName: "kube-api-access-njs49") pod "8feb84b9-2d26-49c2-b4cd-6504c42773e6" (UID: "8feb84b9-2d26-49c2-b4cd-6504c42773e6"). InnerVolumeSpecName "kube-api-access-njs49". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.283978 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-scripts" (OuterVolumeSpecName: "scripts") pod "8feb84b9-2d26-49c2-b4cd-6504c42773e6" (UID: "8feb84b9-2d26-49c2-b4cd-6504c42773e6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.324166 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8feb84b9-2d26-49c2-b4cd-6504c42773e6" (UID: "8feb84b9-2d26-49c2-b4cd-6504c42773e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.332754 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-config-data" (OuterVolumeSpecName: "config-data") pod "8feb84b9-2d26-49c2-b4cd-6504c42773e6" (UID: "8feb84b9-2d26-49c2-b4cd-6504c42773e6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.375068 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.375111 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.375131 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8feb84b9-2d26-49c2-b4cd-6504c42773e6-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.375148 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njs49\" (UniqueName: \"kubernetes.io/projected/8feb84b9-2d26-49c2-b4cd-6504c42773e6-kube-api-access-njs49\") on node \"crc\" DevicePath \"\""
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.666254 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-xfpnw" event={"ID":"8feb84b9-2d26-49c2-b4cd-6504c42773e6","Type":"ContainerDied","Data":"19e14849c8b977000e98227191226b7c8f4d719d2c81f0eb6a55b07036ffdbef"}
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.667646 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19e14849c8b977000e98227191226b7c8f4d719d2c81f0eb6a55b07036ffdbef"
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.666374 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-xfpnw"
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.851138 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.851367 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1e871a2c-75f5-4449-8324-84a7ed255bf2" containerName="nova-api-log" containerID="cri-o://1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922" gracePeriod=30
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.851429 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1e871a2c-75f5-4449-8324-84a7ed255bf2" containerName="nova-api-api" containerID="cri-o://d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c" gracePeriod=30
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.862764 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1e871a2c-75f5-4449-8324-84a7ed255bf2" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": EOF"
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.870083 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1e871a2c-75f5-4449-8324-84a7ed255bf2" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": EOF"
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.876070 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.876329 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="4625af6e-56d3-4c1e-bac1-1ba58400e121" containerName="nova-scheduler-scheduler" containerID="cri-o://536eb3ebe43fffa2e7fcb559b3c2271b8c722bf6e77c1c9dc7d5b845ee7bdc59" gracePeriod=30
pod="openstack/nova-scheduler-0" podUID="4625af6e-56d3-4c1e-bac1-1ba58400e121" containerName="nova-scheduler-scheduler" containerID="cri-o://536eb3ebe43fffa2e7fcb559b3c2271b8c722bf6e77c1c9dc7d5b845ee7bdc59" gracePeriod=30 Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.914496 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.914950 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerName="nova-metadata-log" containerID="cri-o://9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329" gracePeriod=30 Sep 29 21:44:34 crc kubenswrapper[4911]: I0929 21:44:34.915219 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerName="nova-metadata-metadata" containerID="cri-o://e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78" gracePeriod=30 Sep 29 21:44:35 crc kubenswrapper[4911]: E0929 21:44:35.604356 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="536eb3ebe43fffa2e7fcb559b3c2271b8c722bf6e77c1c9dc7d5b845ee7bdc59" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 21:44:35 crc kubenswrapper[4911]: E0929 21:44:35.610191 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="536eb3ebe43fffa2e7fcb559b3c2271b8c722bf6e77c1c9dc7d5b845ee7bdc59" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 21:44:35 crc kubenswrapper[4911]: E0929 21:44:35.632332 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="536eb3ebe43fffa2e7fcb559b3c2271b8c722bf6e77c1c9dc7d5b845ee7bdc59" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 21:44:35 crc kubenswrapper[4911]: E0929 21:44:35.632419 4911 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="4625af6e-56d3-4c1e-bac1-1ba58400e121" containerName="nova-scheduler-scheduler" Sep 29 21:44:35 crc kubenswrapper[4911]: I0929 21:44:35.677835 4911 generic.go:334] "Generic (PLEG): container finished" podID="1e871a2c-75f5-4449-8324-84a7ed255bf2" containerID="1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922" exitCode=143 Sep 29 21:44:35 crc kubenswrapper[4911]: I0929 21:44:35.677907 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1e871a2c-75f5-4449-8324-84a7ed255bf2","Type":"ContainerDied","Data":"1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922"} Sep 29 21:44:35 crc kubenswrapper[4911]: I0929 21:44:35.680981 4911 generic.go:334] "Generic (PLEG): container finished" podID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerID="9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329" exitCode=143 Sep 29 21:44:35 crc kubenswrapper[4911]: I0929 21:44:35.681039 4911 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5e82b41-14f5-4503-a919-e9ea37d98ead","Type":"ContainerDied","Data":"9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329"} Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.060941 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:57704->10.217.0.193:8775: read: connection reset by peer" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.061924 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:57714->10.217.0.193:8775: read: connection reset by peer" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.519637 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.694025 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr8xm\" (UniqueName: \"kubernetes.io/projected/d5e82b41-14f5-4503-a919-e9ea37d98ead-kube-api-access-rr8xm\") pod \"d5e82b41-14f5-4503-a919-e9ea37d98ead\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.694200 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-config-data\") pod \"d5e82b41-14f5-4503-a919-e9ea37d98ead\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.694257 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-combined-ca-bundle\") pod \"d5e82b41-14f5-4503-a919-e9ea37d98ead\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.694293 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5e82b41-14f5-4503-a919-e9ea37d98ead-logs\") pod \"d5e82b41-14f5-4503-a919-e9ea37d98ead\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.694320 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-nova-metadata-tls-certs\") pod \"d5e82b41-14f5-4503-a919-e9ea37d98ead\" (UID: \"d5e82b41-14f5-4503-a919-e9ea37d98ead\") " Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.695559 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5e82b41-14f5-4503-a919-e9ea37d98ead-logs" (OuterVolumeSpecName: "logs") pod "d5e82b41-14f5-4503-a919-e9ea37d98ead" (UID: "d5e82b41-14f5-4503-a919-e9ea37d98ead"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.701841 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5e82b41-14f5-4503-a919-e9ea37d98ead-kube-api-access-rr8xm" (OuterVolumeSpecName: "kube-api-access-rr8xm") pod "d5e82b41-14f5-4503-a919-e9ea37d98ead" (UID: "d5e82b41-14f5-4503-a919-e9ea37d98ead"). InnerVolumeSpecName "kube-api-access-rr8xm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.718089 4911 generic.go:334] "Generic (PLEG): container finished" podID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerID="e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78" exitCode=0 Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.718193 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.737897 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d5e82b41-14f5-4503-a919-e9ea37d98ead" (UID: "d5e82b41-14f5-4503-a919-e9ea37d98ead"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.742681 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-config-data" (OuterVolumeSpecName: "config-data") pod "d5e82b41-14f5-4503-a919-e9ea37d98ead" (UID: "d5e82b41-14f5-4503-a919-e9ea37d98ead"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.763353 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "d5e82b41-14f5-4503-a919-e9ea37d98ead" (UID: "d5e82b41-14f5-4503-a919-e9ea37d98ead"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.799557 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr8xm\" (UniqueName: \"kubernetes.io/projected/d5e82b41-14f5-4503-a919-e9ea37d98ead-kube-api-access-rr8xm\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.799591 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.799606 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.799619 4911 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5e82b41-14f5-4503-a919-e9ea37d98ead-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.799630 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5e82b41-14f5-4503-a919-e9ea37d98ead-logs\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.803599 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5e82b41-14f5-4503-a919-e9ea37d98ead","Type":"ContainerDied","Data":"e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78"} Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.803675 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5e82b41-14f5-4503-a919-e9ea37d98ead","Type":"ContainerDied","Data":"f1d481dbac077dfc083286d7728407bb63939167d582dc6f4ec1bccb7c599774"} Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.803726 4911 scope.go:117] "RemoveContainer" containerID="e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.827483 4911 scope.go:117] "RemoveContainer" containerID="9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.850204 4911 scope.go:117] "RemoveContainer" containerID="e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78" Sep 29 21:44:38 crc kubenswrapper[4911]: E0929 21:44:38.850729 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78\": container with ID starting with e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78 not found: ID does not exist" containerID="e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.850773 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78"} err="failed to get container status \"e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78\": rpc error: code = NotFound desc = could not find container \"e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78\": container with ID starting with 
e19a0699c90aeb6e8fde803dee2d962f454faaa447c67dd6aede907fce66da78 not found: ID does not exist" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.850819 4911 scope.go:117] "RemoveContainer" containerID="9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329" Sep 29 21:44:38 crc kubenswrapper[4911]: E0929 21:44:38.851085 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329\": container with ID starting with 9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329 not found: ID does not exist" containerID="9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329" Sep 29 21:44:38 crc kubenswrapper[4911]: I0929 21:44:38.851123 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329"} err="failed to get container status \"9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329\": rpc error: code = NotFound desc = could not find container \"9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329\": container with ID starting with 9e398e40eee9096c78e88b82b35278cb275b6a72012e7b3d6ffde9581f2bc329 not found: ID does not exist" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.053917 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.065304 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.073856 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 21:44:39 crc kubenswrapper[4911]: E0929 21:44:39.074281 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerName="nova-metadata-metadata" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.074303 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerName="nova-metadata-metadata" Sep 29 21:44:39 crc kubenswrapper[4911]: E0929 21:44:39.074319 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerName="nova-metadata-log" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.074326 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerName="nova-metadata-log" Sep 29 21:44:39 crc kubenswrapper[4911]: E0929 21:44:39.074344 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3046cee5-d66c-4a66-bed7-2f4a36df4113" containerName="init" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.074364 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3046cee5-d66c-4a66-bed7-2f4a36df4113" containerName="init" Sep 29 21:44:39 crc kubenswrapper[4911]: E0929 21:44:39.074377 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8feb84b9-2d26-49c2-b4cd-6504c42773e6" containerName="nova-manage" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.074383 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8feb84b9-2d26-49c2-b4cd-6504c42773e6" containerName="nova-manage" Sep 29 21:44:39 crc kubenswrapper[4911]: E0929 21:44:39.074409 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3046cee5-d66c-4a66-bed7-2f4a36df4113" 
containerName="dnsmasq-dns" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.074417 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3046cee5-d66c-4a66-bed7-2f4a36df4113" containerName="dnsmasq-dns" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.074606 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerName="nova-metadata-log" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.074630 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" containerName="nova-metadata-metadata" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.074646 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="3046cee5-d66c-4a66-bed7-2f4a36df4113" containerName="dnsmasq-dns" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.074652 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="8feb84b9-2d26-49c2-b4cd-6504c42773e6" containerName="nova-manage" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.075700 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.078456 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.078581 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.092903 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.206712 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-logs\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.206774 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.206815 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-config-data\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.206856 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfmr6\" (UniqueName: \"kubernetes.io/projected/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-kube-api-access-zfmr6\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.206912 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.308897 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-logs\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.308949 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.308968 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-config-data\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.308994 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfmr6\" (UniqueName: \"kubernetes.io/projected/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-kube-api-access-zfmr6\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.309034 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.309993 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-logs\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.313899 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.319545 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.324756 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-config-data\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.332514 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfmr6\" (UniqueName: 
\"kubernetes.io/projected/50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e-kube-api-access-zfmr6\") pod \"nova-metadata-0\" (UID: \"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e\") " pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.397409 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.740768 4911 generic.go:334] "Generic (PLEG): container finished" podID="4625af6e-56d3-4c1e-bac1-1ba58400e121" containerID="536eb3ebe43fffa2e7fcb559b3c2271b8c722bf6e77c1c9dc7d5b845ee7bdc59" exitCode=0 Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.740919 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4625af6e-56d3-4c1e-bac1-1ba58400e121","Type":"ContainerDied","Data":"536eb3ebe43fffa2e7fcb559b3c2271b8c722bf6e77c1c9dc7d5b845ee7bdc59"} Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.865015 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 21:44:39 crc kubenswrapper[4911]: I0929 21:44:39.879722 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 21:44:39 crc kubenswrapper[4911]: W0929 21:44:39.893092 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50bcb1f1_af2c_410c_bbca_fc0fd9b05f4e.slice/crio-859857fff4ed6da337d7c73899b61b105c9665ea02dbfc78e719aaf0f8051983 WatchSource:0}: Error finding container 859857fff4ed6da337d7c73899b61b105c9665ea02dbfc78e719aaf0f8051983: Status 404 returned error can't find the container with id 859857fff4ed6da337d7c73899b61b105c9665ea02dbfc78e719aaf0f8051983 Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.021460 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4625af6e-56d3-4c1e-bac1-1ba58400e121-combined-ca-bundle\") pod \"4625af6e-56d3-4c1e-bac1-1ba58400e121\" (UID: \"4625af6e-56d3-4c1e-bac1-1ba58400e121\") " Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.021661 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4625af6e-56d3-4c1e-bac1-1ba58400e121-config-data\") pod \"4625af6e-56d3-4c1e-bac1-1ba58400e121\" (UID: \"4625af6e-56d3-4c1e-bac1-1ba58400e121\") " Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.021772 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvwcb\" (UniqueName: \"kubernetes.io/projected/4625af6e-56d3-4c1e-bac1-1ba58400e121-kube-api-access-mvwcb\") pod \"4625af6e-56d3-4c1e-bac1-1ba58400e121\" (UID: \"4625af6e-56d3-4c1e-bac1-1ba58400e121\") " Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.033104 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4625af6e-56d3-4c1e-bac1-1ba58400e121-kube-api-access-mvwcb" (OuterVolumeSpecName: "kube-api-access-mvwcb") pod "4625af6e-56d3-4c1e-bac1-1ba58400e121" (UID: "4625af6e-56d3-4c1e-bac1-1ba58400e121"). InnerVolumeSpecName "kube-api-access-mvwcb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.060770 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4625af6e-56d3-4c1e-bac1-1ba58400e121-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4625af6e-56d3-4c1e-bac1-1ba58400e121" (UID: "4625af6e-56d3-4c1e-bac1-1ba58400e121"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.082002 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4625af6e-56d3-4c1e-bac1-1ba58400e121-config-data" (OuterVolumeSpecName: "config-data") pod "4625af6e-56d3-4c1e-bac1-1ba58400e121" (UID: "4625af6e-56d3-4c1e-bac1-1ba58400e121"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.123888 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvwcb\" (UniqueName: \"kubernetes.io/projected/4625af6e-56d3-4c1e-bac1-1ba58400e121-kube-api-access-mvwcb\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.123930 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4625af6e-56d3-4c1e-bac1-1ba58400e121-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.123948 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4625af6e-56d3-4c1e-bac1-1ba58400e121-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.715574 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5e82b41-14f5-4503-a919-e9ea37d98ead" path="/var/lib/kubelet/pods/d5e82b41-14f5-4503-a919-e9ea37d98ead/volumes" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.739784 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.754889 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e","Type":"ContainerStarted","Data":"6d73e674f56bc74a052a40f22ea7809d97151c6605ed5189e0de39300108a189"} Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.755018 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e","Type":"ContainerStarted","Data":"227ac6575a0386661127dfd5c2eca70019fa79b09bba6d168f4cae86b0e88859"} Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.755041 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e","Type":"ContainerStarted","Data":"859857fff4ed6da337d7c73899b61b105c9665ea02dbfc78e719aaf0f8051983"} Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.762575 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4625af6e-56d3-4c1e-bac1-1ba58400e121","Type":"ContainerDied","Data":"5198e47a1cc442817c3fd3c9a8d05c9820fa5e0e863e8f6e9bc37c11c13432cf"} Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.762679 4911 scope.go:117] "RemoveContainer" containerID="536eb3ebe43fffa2e7fcb559b3c2271b8c722bf6e77c1c9dc7d5b845ee7bdc59" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.762845 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.787123 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.7871054929999999 podStartE2EDuration="1.787105493s" podCreationTimestamp="2025-09-29 21:44:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:44:40.783728027 +0000 UTC m=+1158.760840698" watchObservedRunningTime="2025-09-29 21:44:40.787105493 +0000 UTC m=+1158.764218154" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.833680 4911 generic.go:334] "Generic (PLEG): container finished" podID="1e871a2c-75f5-4449-8324-84a7ed255bf2" containerID="d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c" exitCode=0 Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.833736 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1e871a2c-75f5-4449-8324-84a7ed255bf2","Type":"ContainerDied","Data":"d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c"} Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.833763 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1e871a2c-75f5-4449-8324-84a7ed255bf2","Type":"ContainerDied","Data":"8436265b1add2b9f3fd1c3599d311e258f72b014851893b20cdbeef936bfff3d"} Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.833881 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.840648 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvcnp\" (UniqueName: \"kubernetes.io/projected/1e871a2c-75f5-4449-8324-84a7ed255bf2-kube-api-access-tvcnp\") pod \"1e871a2c-75f5-4449-8324-84a7ed255bf2\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.840733 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-public-tls-certs\") pod \"1e871a2c-75f5-4449-8324-84a7ed255bf2\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.840780 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-combined-ca-bundle\") pod \"1e871a2c-75f5-4449-8324-84a7ed255bf2\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.840856 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-config-data\") pod \"1e871a2c-75f5-4449-8324-84a7ed255bf2\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.840883 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e871a2c-75f5-4449-8324-84a7ed255bf2-logs\") pod \"1e871a2c-75f5-4449-8324-84a7ed255bf2\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.840925 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-internal-tls-certs\") pod \"1e871a2c-75f5-4449-8324-84a7ed255bf2\" (UID: \"1e871a2c-75f5-4449-8324-84a7ed255bf2\") " Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.841703 4911 scope.go:117] "RemoveContainer" containerID="d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.843610 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e871a2c-75f5-4449-8324-84a7ed255bf2-logs" (OuterVolumeSpecName: "logs") pod "1e871a2c-75f5-4449-8324-84a7ed255bf2" (UID: "1e871a2c-75f5-4449-8324-84a7ed255bf2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.868549 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e871a2c-75f5-4449-8324-84a7ed255bf2-kube-api-access-tvcnp" (OuterVolumeSpecName: "kube-api-access-tvcnp") pod "1e871a2c-75f5-4449-8324-84a7ed255bf2" (UID: "1e871a2c-75f5-4449-8324-84a7ed255bf2"). InnerVolumeSpecName "kube-api-access-tvcnp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.874957 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-config-data" (OuterVolumeSpecName: "config-data") pod "1e871a2c-75f5-4449-8324-84a7ed255bf2" (UID: "1e871a2c-75f5-4449-8324-84a7ed255bf2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.875880 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.878434 4911 scope.go:117] "RemoveContainer" containerID="1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.884372 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1e871a2c-75f5-4449-8324-84a7ed255bf2" (UID: "1e871a2c-75f5-4449-8324-84a7ed255bf2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.891881 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.924480 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1e871a2c-75f5-4449-8324-84a7ed255bf2" (UID: "1e871a2c-75f5-4449-8324-84a7ed255bf2"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.925643 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:44:40 crc kubenswrapper[4911]: E0929 21:44:40.926059 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e871a2c-75f5-4449-8324-84a7ed255bf2" containerName="nova-api-log" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.926079 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e871a2c-75f5-4449-8324-84a7ed255bf2" containerName="nova-api-log" Sep 29 21:44:40 crc kubenswrapper[4911]: E0929 21:44:40.926096 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e871a2c-75f5-4449-8324-84a7ed255bf2" containerName="nova-api-api" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.926103 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e871a2c-75f5-4449-8324-84a7ed255bf2" containerName="nova-api-api" Sep 29 21:44:40 crc kubenswrapper[4911]: E0929 21:44:40.926134 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4625af6e-56d3-4c1e-bac1-1ba58400e121" containerName="nova-scheduler-scheduler" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.926139 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4625af6e-56d3-4c1e-bac1-1ba58400e121" containerName="nova-scheduler-scheduler" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.926321 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e871a2c-75f5-4449-8324-84a7ed255bf2" containerName="nova-api-api" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.926341 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4625af6e-56d3-4c1e-bac1-1ba58400e121" containerName="nova-scheduler-scheduler" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.926351 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e871a2c-75f5-4449-8324-84a7ed255bf2" containerName="nova-api-log" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.927044 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.929291 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.937752 4911 scope.go:117] "RemoveContainer" containerID="d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.939015 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:44:40 crc kubenswrapper[4911]: E0929 21:44:40.939211 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c\": container with ID starting with d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c not found: ID does not exist" containerID="d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.939243 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c"} err="failed to get container status \"d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c\": rpc error: code = NotFound desc = could not find container \"d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c\": container with ID starting with d3082b58c363cd321e8435a6bac6fbe05b458f2d3ab206d01d45d4ec8ec7c12c not found: ID does not exist" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.939266 4911 scope.go:117] "RemoveContainer" containerID="1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922" Sep 29 21:44:40 crc kubenswrapper[4911]: E0929 21:44:40.939899 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922\": container with ID starting with 1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922 not found: ID does not exist" containerID="1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.939923 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922"} err="failed to get container status \"1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922\": rpc error: code = NotFound desc = could not find container \"1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922\": container with ID starting with 1d60b62cb03c4ea768c2bff3e708a73523320e1da167aecb75dc993954f42922 not found: ID does not exist" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.945014 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1e871a2c-75f5-4449-8324-84a7ed255bf2" (UID: "1e871a2c-75f5-4449-8324-84a7ed255bf2"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.949215 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvcnp\" (UniqueName: \"kubernetes.io/projected/1e871a2c-75f5-4449-8324-84a7ed255bf2-kube-api-access-tvcnp\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.949326 4911 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.949393 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.949448 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.949542 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e871a2c-75f5-4449-8324-84a7ed255bf2-logs\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:40 crc kubenswrapper[4911]: I0929 21:44:40.949614 4911 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e871a2c-75f5-4449-8324-84a7ed255bf2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.051772 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b196d32a-bb27-4cc0-929f-c49f7a33a52a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b196d32a-bb27-4cc0-929f-c49f7a33a52a\") " pod="openstack/nova-scheduler-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.052101 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b196d32a-bb27-4cc0-929f-c49f7a33a52a-config-data\") pod \"nova-scheduler-0\" (UID: \"b196d32a-bb27-4cc0-929f-c49f7a33a52a\") " pod="openstack/nova-scheduler-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.052471 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jscc\" (UniqueName: \"kubernetes.io/projected/b196d32a-bb27-4cc0-929f-c49f7a33a52a-kube-api-access-6jscc\") pod \"nova-scheduler-0\" (UID: \"b196d32a-bb27-4cc0-929f-c49f7a33a52a\") " pod="openstack/nova-scheduler-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.154660 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jscc\" (UniqueName: \"kubernetes.io/projected/b196d32a-bb27-4cc0-929f-c49f7a33a52a-kube-api-access-6jscc\") pod \"nova-scheduler-0\" (UID: \"b196d32a-bb27-4cc0-929f-c49f7a33a52a\") " pod="openstack/nova-scheduler-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.154742 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b196d32a-bb27-4cc0-929f-c49f7a33a52a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b196d32a-bb27-4cc0-929f-c49f7a33a52a\") " 
pod="openstack/nova-scheduler-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.154854 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b196d32a-bb27-4cc0-929f-c49f7a33a52a-config-data\") pod \"nova-scheduler-0\" (UID: \"b196d32a-bb27-4cc0-929f-c49f7a33a52a\") " pod="openstack/nova-scheduler-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.158724 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b196d32a-bb27-4cc0-929f-c49f7a33a52a-config-data\") pod \"nova-scheduler-0\" (UID: \"b196d32a-bb27-4cc0-929f-c49f7a33a52a\") " pod="openstack/nova-scheduler-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.159985 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b196d32a-bb27-4cc0-929f-c49f7a33a52a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b196d32a-bb27-4cc0-929f-c49f7a33a52a\") " pod="openstack/nova-scheduler-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.172689 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jscc\" (UniqueName: \"kubernetes.io/projected/b196d32a-bb27-4cc0-929f-c49f7a33a52a-kube-api-access-6jscc\") pod \"nova-scheduler-0\" (UID: \"b196d32a-bb27-4cc0-929f-c49f7a33a52a\") " pod="openstack/nova-scheduler-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.249036 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.254409 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.265830 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.298492 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.300445 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.302908 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.303420 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.303673 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.311834 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.461068 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-internal-tls-certs\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.461409 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-config-data\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.461476 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-public-tls-certs\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.461640 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w655\" (UniqueName: \"kubernetes.io/projected/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-kube-api-access-4w655\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.461988 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.462033 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-logs\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.564011 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-public-tls-certs\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.564072 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w655\" (UniqueName: \"kubernetes.io/projected/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-kube-api-access-4w655\") pod 
\"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.564150 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.564170 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-logs\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.564220 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-internal-tls-certs\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.564236 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-config-data\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.564967 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-logs\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.569502 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.569862 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-config-data\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.569981 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-public-tls-certs\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.570459 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-internal-tls-certs\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.595050 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w655\" (UniqueName: \"kubernetes.io/projected/64c7af34-3461-4dcd-9caf-5ff6f5fb90af-kube-api-access-4w655\") pod \"nova-api-0\" (UID: \"64c7af34-3461-4dcd-9caf-5ff6f5fb90af\") " pod="openstack/nova-api-0" Sep 
29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.670682 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 21:44:41 crc kubenswrapper[4911]: I0929 21:44:41.759958 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 21:44:41 crc kubenswrapper[4911]: W0929 21:44:41.762597 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb196d32a_bb27_4cc0_929f_c49f7a33a52a.slice/crio-0bba8ded70e5cc29d730f29edea11b76357ae188273673868022942d1cb4cbe3 WatchSource:0}: Error finding container 0bba8ded70e5cc29d730f29edea11b76357ae188273673868022942d1cb4cbe3: Status 404 returned error can't find the container with id 0bba8ded70e5cc29d730f29edea11b76357ae188273673868022942d1cb4cbe3 Sep 29 21:44:42 crc kubenswrapper[4911]: I0929 21:44:41.892038 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b196d32a-bb27-4cc0-929f-c49f7a33a52a","Type":"ContainerStarted","Data":"0bba8ded70e5cc29d730f29edea11b76357ae188273673868022942d1cb4cbe3"} Sep 29 21:44:42 crc kubenswrapper[4911]: I0929 21:44:42.733239 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e871a2c-75f5-4449-8324-84a7ed255bf2" path="/var/lib/kubelet/pods/1e871a2c-75f5-4449-8324-84a7ed255bf2/volumes" Sep 29 21:44:42 crc kubenswrapper[4911]: I0929 21:44:42.735011 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4625af6e-56d3-4c1e-bac1-1ba58400e121" path="/var/lib/kubelet/pods/4625af6e-56d3-4c1e-bac1-1ba58400e121/volumes" Sep 29 21:44:42 crc kubenswrapper[4911]: I0929 21:44:42.907059 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b196d32a-bb27-4cc0-929f-c49f7a33a52a","Type":"ContainerStarted","Data":"7f9809e6198801f6189894ec8f08371597c89a6940ee7afc700adbf9e0c3e3af"} Sep 29 21:44:42 crc kubenswrapper[4911]: I0929 21:44:42.927004 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.926977496 podStartE2EDuration="2.926977496s" podCreationTimestamp="2025-09-29 21:44:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:44:42.924947902 +0000 UTC m=+1160.902060653" watchObservedRunningTime="2025-09-29 21:44:42.926977496 +0000 UTC m=+1160.904090177" Sep 29 21:44:42 crc kubenswrapper[4911]: I0929 21:44:42.984129 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 21:44:43 crc kubenswrapper[4911]: I0929 21:44:43.919734 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"64c7af34-3461-4dcd-9caf-5ff6f5fb90af","Type":"ContainerStarted","Data":"32ae6f8dce83ec7ffee4628f64bcaa0366cbf59af7558ff7c8deb57ded10c5e4"} Sep 29 21:44:43 crc kubenswrapper[4911]: I0929 21:44:43.920107 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"64c7af34-3461-4dcd-9caf-5ff6f5fb90af","Type":"ContainerStarted","Data":"d12a6ada10d0f6979d12c79d37c13fbff686f65ea03bac6ebe7cfd6d1be6c50f"} Sep 29 21:44:43 crc kubenswrapper[4911]: I0929 21:44:43.920131 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"64c7af34-3461-4dcd-9caf-5ff6f5fb90af","Type":"ContainerStarted","Data":"89d0ab7b6ae0bb47f237ab52de9c95943e6c82d6164a8691981d9eec60214e34"} Sep 29 21:44:43 crc kubenswrapper[4911]: I0929 21:44:43.943523 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.943498957 podStartE2EDuration="2.943498957s" podCreationTimestamp="2025-09-29 21:44:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:44:43.940079451 +0000 UTC m=+1161.917192142" watchObservedRunningTime="2025-09-29 21:44:43.943498957 +0000 UTC m=+1161.920611688" Sep 29 21:44:44 crc kubenswrapper[4911]: I0929 21:44:44.398355 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 21:44:44 crc kubenswrapper[4911]: I0929 21:44:44.398394 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 21:44:46 crc kubenswrapper[4911]: I0929 21:44:46.250783 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 21:44:49 crc kubenswrapper[4911]: I0929 21:44:49.398804 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 21:44:49 crc kubenswrapper[4911]: I0929 21:44:49.399442 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 21:44:50 crc kubenswrapper[4911]: I0929 21:44:50.409002 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 21:44:50 crc kubenswrapper[4911]: I0929 21:44:50.409045 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 21:44:51 crc kubenswrapper[4911]: I0929 21:44:51.250647 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 21:44:51 crc kubenswrapper[4911]: I0929 21:44:51.279896 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 21:44:51 crc kubenswrapper[4911]: I0929 21:44:51.671711 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 21:44:51 crc kubenswrapper[4911]: I0929 21:44:51.671775 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 21:44:52 crc kubenswrapper[4911]: I0929 21:44:52.038818 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 21:44:52 crc kubenswrapper[4911]: I0929 21:44:52.689987 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="64c7af34-3461-4dcd-9caf-5ff6f5fb90af" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.205:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 21:44:52 crc kubenswrapper[4911]: I0929 
21:44:52.689987 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="64c7af34-3461-4dcd-9caf-5ff6f5fb90af" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.205:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 21:44:54 crc kubenswrapper[4911]: I0929 21:44:54.228413 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 21:44:59 crc kubenswrapper[4911]: I0929 21:44:59.411399 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 21:44:59 crc kubenswrapper[4911]: I0929 21:44:59.416041 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 21:44:59 crc kubenswrapper[4911]: I0929 21:44:59.422641 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.115093 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.168361 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr"] Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.169883 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.179950 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.181547 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr"] Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.182680 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.249288 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-config-volume\") pod \"collect-profiles-29319705-8g9jr\" (UID: \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.249390 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv5cw\" (UniqueName: \"kubernetes.io/projected/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-kube-api-access-mv5cw\") pod \"collect-profiles-29319705-8g9jr\" (UID: \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.249424 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-secret-volume\") pod \"collect-profiles-29319705-8g9jr\" (UID: \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.350988 
4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv5cw\" (UniqueName: \"kubernetes.io/projected/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-kube-api-access-mv5cw\") pod \"collect-profiles-29319705-8g9jr\" (UID: \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.351039 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-secret-volume\") pod \"collect-profiles-29319705-8g9jr\" (UID: \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.351180 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-config-volume\") pod \"collect-profiles-29319705-8g9jr\" (UID: \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.352077 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-config-volume\") pod \"collect-profiles-29319705-8g9jr\" (UID: \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.358095 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-secret-volume\") pod \"collect-profiles-29319705-8g9jr\" (UID: \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.367467 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv5cw\" (UniqueName: \"kubernetes.io/projected/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-kube-api-access-mv5cw\") pod \"collect-profiles-29319705-8g9jr\" (UID: \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.503698 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:00 crc kubenswrapper[4911]: I0929 21:45:00.957379 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr"] Sep 29 21:45:00 crc kubenswrapper[4911]: W0929 21:45:00.958482 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod436f4a94_0a8c_4d88_8188_c5d0442dbdd6.slice/crio-4a2cdf32b5d5c8f12f6f496b0ad69f031b143a43cdd9ec749713daa80f81707b WatchSource:0}: Error finding container 4a2cdf32b5d5c8f12f6f496b0ad69f031b143a43cdd9ec749713daa80f81707b: Status 404 returned error can't find the container with id 4a2cdf32b5d5c8f12f6f496b0ad69f031b143a43cdd9ec749713daa80f81707b Sep 29 21:45:01 crc kubenswrapper[4911]: I0929 21:45:01.131361 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" event={"ID":"436f4a94-0a8c-4d88-8188-c5d0442dbdd6","Type":"ContainerStarted","Data":"4a2cdf32b5d5c8f12f6f496b0ad69f031b143a43cdd9ec749713daa80f81707b"} Sep 29 21:45:01 crc kubenswrapper[4911]: I0929 21:45:01.153683 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" podStartSLOduration=1.153660109 podStartE2EDuration="1.153660109s" podCreationTimestamp="2025-09-29 21:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:45:01.145708042 +0000 UTC m=+1179.122820723" watchObservedRunningTime="2025-09-29 21:45:01.153660109 +0000 UTC m=+1179.130772790" Sep 29 21:45:01 crc kubenswrapper[4911]: I0929 21:45:01.679697 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 21:45:01 crc kubenswrapper[4911]: I0929 21:45:01.680400 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 21:45:01 crc kubenswrapper[4911]: I0929 21:45:01.681250 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 21:45:01 crc kubenswrapper[4911]: I0929 21:45:01.686553 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 21:45:02 crc kubenswrapper[4911]: I0929 21:45:02.145443 4911 generic.go:334] "Generic (PLEG): container finished" podID="436f4a94-0a8c-4d88-8188-c5d0442dbdd6" containerID="d48dd7c20d65b0de280abf27f2bb83891836c47f4216f688ea2f77e79884c19f" exitCode=0 Sep 29 21:45:02 crc kubenswrapper[4911]: I0929 21:45:02.145582 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" event={"ID":"436f4a94-0a8c-4d88-8188-c5d0442dbdd6","Type":"ContainerDied","Data":"d48dd7c20d65b0de280abf27f2bb83891836c47f4216f688ea2f77e79884c19f"} Sep 29 21:45:02 crc kubenswrapper[4911]: I0929 21:45:02.146752 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 21:45:02 crc kubenswrapper[4911]: I0929 21:45:02.158302 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 21:45:03 crc kubenswrapper[4911]: I0929 21:45:03.549820 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:03 crc kubenswrapper[4911]: I0929 21:45:03.618956 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mv5cw\" (UniqueName: \"kubernetes.io/projected/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-kube-api-access-mv5cw\") pod \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\" (UID: \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\") " Sep 29 21:45:03 crc kubenswrapper[4911]: I0929 21:45:03.619174 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-secret-volume\") pod \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\" (UID: \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\") " Sep 29 21:45:03 crc kubenswrapper[4911]: I0929 21:45:03.619271 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-config-volume\") pod \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\" (UID: \"436f4a94-0a8c-4d88-8188-c5d0442dbdd6\") " Sep 29 21:45:03 crc kubenswrapper[4911]: I0929 21:45:03.620324 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-config-volume" (OuterVolumeSpecName: "config-volume") pod "436f4a94-0a8c-4d88-8188-c5d0442dbdd6" (UID: "436f4a94-0a8c-4d88-8188-c5d0442dbdd6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:03 crc kubenswrapper[4911]: I0929 21:45:03.625879 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "436f4a94-0a8c-4d88-8188-c5d0442dbdd6" (UID: "436f4a94-0a8c-4d88-8188-c5d0442dbdd6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:45:03 crc kubenswrapper[4911]: I0929 21:45:03.627354 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-kube-api-access-mv5cw" (OuterVolumeSpecName: "kube-api-access-mv5cw") pod "436f4a94-0a8c-4d88-8188-c5d0442dbdd6" (UID: "436f4a94-0a8c-4d88-8188-c5d0442dbdd6"). InnerVolumeSpecName "kube-api-access-mv5cw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:45:03 crc kubenswrapper[4911]: I0929 21:45:03.721265 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mv5cw\" (UniqueName: \"kubernetes.io/projected/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-kube-api-access-mv5cw\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:03 crc kubenswrapper[4911]: I0929 21:45:03.721309 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:03 crc kubenswrapper[4911]: I0929 21:45:03.721326 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/436f4a94-0a8c-4d88-8188-c5d0442dbdd6-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:04 crc kubenswrapper[4911]: I0929 21:45:04.167161 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" event={"ID":"436f4a94-0a8c-4d88-8188-c5d0442dbdd6","Type":"ContainerDied","Data":"4a2cdf32b5d5c8f12f6f496b0ad69f031b143a43cdd9ec749713daa80f81707b"} Sep 29 21:45:04 crc kubenswrapper[4911]: I0929 21:45:04.167201 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319705-8g9jr" Sep 29 21:45:04 crc kubenswrapper[4911]: I0929 21:45:04.167212 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a2cdf32b5d5c8f12f6f496b0ad69f031b143a43cdd9ec749713daa80f81707b" Sep 29 21:45:13 crc kubenswrapper[4911]: I0929 21:45:13.645713 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:45:13 crc kubenswrapper[4911]: I0929 21:45:13.646403 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="ceilometer-central-agent" containerID="cri-o://273d75f7624d8845f71b4dfc3cef007bb1fc5786c51a91b92e6cf6d1d3463739" gracePeriod=30 Sep 29 21:45:13 crc kubenswrapper[4911]: I0929 21:45:13.646519 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="ceilometer-notification-agent" containerID="cri-o://9f4d06a638604e97771791a805949518fe058a028ea83e723afa00eb779a5ebc" gracePeriod=30 Sep 29 21:45:13 crc kubenswrapper[4911]: I0929 21:45:13.646518 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="sg-core" containerID="cri-o://eff336afb6da0f21d3ae1eadfaaa8ab3146491a84692faeaabdd537ff2a2e985" gracePeriod=30 Sep 29 21:45:13 crc kubenswrapper[4911]: I0929 21:45:13.646540 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="proxy-httpd" containerID="cri-o://5cb5e22da0b1c37229ae817713095476a189fb658efca8fd730103319efff137" gracePeriod=30 Sep 29 21:45:13 crc kubenswrapper[4911]: I0929 21:45:13.784932 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 21:45:14 crc kubenswrapper[4911]: I0929 21:45:14.290759 4911 generic.go:334] "Generic (PLEG): container finished" podID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" 
containerID="5cb5e22da0b1c37229ae817713095476a189fb658efca8fd730103319efff137" exitCode=0 Sep 29 21:45:14 crc kubenswrapper[4911]: I0929 21:45:14.290834 4911 generic.go:334] "Generic (PLEG): container finished" podID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerID="eff336afb6da0f21d3ae1eadfaaa8ab3146491a84692faeaabdd537ff2a2e985" exitCode=2 Sep 29 21:45:14 crc kubenswrapper[4911]: I0929 21:45:14.290847 4911 generic.go:334] "Generic (PLEG): container finished" podID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerID="273d75f7624d8845f71b4dfc3cef007bb1fc5786c51a91b92e6cf6d1d3463739" exitCode=0 Sep 29 21:45:14 crc kubenswrapper[4911]: I0929 21:45:14.290843 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7","Type":"ContainerDied","Data":"5cb5e22da0b1c37229ae817713095476a189fb658efca8fd730103319efff137"} Sep 29 21:45:14 crc kubenswrapper[4911]: I0929 21:45:14.290881 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7","Type":"ContainerDied","Data":"eff336afb6da0f21d3ae1eadfaaa8ab3146491a84692faeaabdd537ff2a2e985"} Sep 29 21:45:14 crc kubenswrapper[4911]: I0929 21:45:14.290892 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7","Type":"ContainerDied","Data":"273d75f7624d8845f71b4dfc3cef007bb1fc5786c51a91b92e6cf6d1d3463739"} Sep 29 21:45:15 crc kubenswrapper[4911]: I0929 21:45:15.220980 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.098316 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="16704d0f-ad69-4cc9-890a-77c268d78151" containerName="rabbitmq" containerID="cri-o://69fddeba00ec9a43661c93507c63cebfd0cabbe5003764a2097f0fb5cbe78287" gracePeriod=604796 Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.331419 4911 generic.go:334] "Generic (PLEG): container finished" podID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerID="9f4d06a638604e97771791a805949518fe058a028ea83e723afa00eb779a5ebc" exitCode=0 Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.331458 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7","Type":"ContainerDied","Data":"9f4d06a638604e97771791a805949518fe058a028ea83e723afa00eb779a5ebc"} Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.595805 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.730726 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsx7g\" (UniqueName: \"kubernetes.io/projected/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-kube-api-access-tsx7g\") pod \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.730769 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-config-data\") pod \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.730828 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-run-httpd\") pod \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.730899 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-log-httpd\") pod \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.731008 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-ceilometer-tls-certs\") pod \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.731030 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-combined-ca-bundle\") pod \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.731057 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-sg-core-conf-yaml\") pod \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.731085 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-scripts\") pod \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\" (UID: \"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7\") " Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.731264 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" (UID: "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.732082 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.732250 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" (UID: "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.751740 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-kube-api-access-tsx7g" (OuterVolumeSpecName: "kube-api-access-tsx7g") pod "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" (UID: "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7"). InnerVolumeSpecName "kube-api-access-tsx7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.752049 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-scripts" (OuterVolumeSpecName: "scripts") pod "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" (UID: "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.767419 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" (UID: "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.800835 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" (UID: "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.834398 4911 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.834432 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.834442 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.834452 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsx7g\" (UniqueName: \"kubernetes.io/projected/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-kube-api-access-tsx7g\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.834461 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.844732 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" (UID: "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.869198 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-config-data" (OuterVolumeSpecName: "config-data") pod "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" (UID: "0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.936041 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:18 crc kubenswrapper[4911]: I0929 21:45:18.936230 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.346592 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7","Type":"ContainerDied","Data":"e506de0974e98186f7421c88fdf6360fbed54cb7de5ffc4e62239dcb668b400e"} Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.346656 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.347462 4911 scope.go:117] "RemoveContainer" containerID="5cb5e22da0b1c37229ae817713095476a189fb658efca8fd730103319efff137" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.368148 4911 scope.go:117] "RemoveContainer" containerID="eff336afb6da0f21d3ae1eadfaaa8ab3146491a84692faeaabdd537ff2a2e985" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.381141 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.388561 4911 scope.go:117] "RemoveContainer" containerID="9f4d06a638604e97771791a805949518fe058a028ea83e723afa00eb779a5ebc" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.390382 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.409808 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.410995 4911 scope.go:117] "RemoveContainer" containerID="273d75f7624d8845f71b4dfc3cef007bb1fc5786c51a91b92e6cf6d1d3463739" Sep 29 21:45:19 crc kubenswrapper[4911]: E0929 21:45:19.414042 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="ceilometer-notification-agent" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.414130 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="ceilometer-notification-agent" Sep 29 21:45:19 crc kubenswrapper[4911]: E0929 21:45:19.414215 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="ceilometer-central-agent" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.414285 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="ceilometer-central-agent" Sep 29 21:45:19 crc kubenswrapper[4911]: E0929 21:45:19.414356 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="proxy-httpd" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.414438 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="proxy-httpd" Sep 29 21:45:19 crc kubenswrapper[4911]: E0929 21:45:19.414527 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="sg-core" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.414596 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="sg-core" Sep 29 21:45:19 crc kubenswrapper[4911]: E0929 21:45:19.414673 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="436f4a94-0a8c-4d88-8188-c5d0442dbdd6" containerName="collect-profiles" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.414750 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="436f4a94-0a8c-4d88-8188-c5d0442dbdd6" containerName="collect-profiles" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.415185 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="ceilometer-notification-agent" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.415259 4911 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="ceilometer-central-agent" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.415322 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="436f4a94-0a8c-4d88-8188-c5d0442dbdd6" containerName="collect-profiles" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.415387 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="sg-core" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.415453 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" containerName="proxy-httpd" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.419622 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.423571 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.423672 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.427423 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.438315 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.549953 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.550012 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf94t\" (UniqueName: \"kubernetes.io/projected/f523c771-a76c-4854-a62f-85e929e1a24b-kube-api-access-hf94t\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.550050 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-config-data\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.550088 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f523c771-a76c-4854-a62f-85e929e1a24b-run-httpd\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.550110 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.550312 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-scripts\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.550449 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.550697 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f523c771-a76c-4854-a62f-85e929e1a24b-log-httpd\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.651886 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.652279 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f523c771-a76c-4854-a62f-85e929e1a24b-log-httpd\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.652418 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.652540 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf94t\" (UniqueName: \"kubernetes.io/projected/f523c771-a76c-4854-a62f-85e929e1a24b-kube-api-access-hf94t\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.652656 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-config-data\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.652804 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f523c771-a76c-4854-a62f-85e929e1a24b-run-httpd\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.652899 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.653499 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f523c771-a76c-4854-a62f-85e929e1a24b-run-httpd\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.653584 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f523c771-a76c-4854-a62f-85e929e1a24b-log-httpd\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.653714 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-scripts\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.657966 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.658320 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.658601 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-config-data\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.659470 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-scripts\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.667230 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.670611 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf94t\" (UniqueName: \"kubernetes.io/projected/f523c771-a76c-4854-a62f-85e929e1a24b-kube-api-access-hf94t\") pod \"ceilometer-0\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") " pod="openstack/ceilometer-0" Sep 29 21:45:19 crc kubenswrapper[4911]: I0929 21:45:19.738913 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 21:45:20 crc kubenswrapper[4911]: I0929 21:45:20.053805 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="54bedb11-6943-4e34-a221-8dbd2cfd5eee" containerName="rabbitmq" containerID="cri-o://4a91ba752e02ad019e74614fab8db0819aae4221643a836bef5f3dd960c39ac3" gracePeriod=604796 Sep 29 21:45:20 crc kubenswrapper[4911]: I0929 21:45:20.221866 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 21:45:20 crc kubenswrapper[4911]: I0929 21:45:20.355300 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f523c771-a76c-4854-a62f-85e929e1a24b","Type":"ContainerStarted","Data":"5b765ea2e53b75bce1124d4de7943105c74eda1d5528d7716858d5dfb32b6222"} Sep 29 21:45:20 crc kubenswrapper[4911]: I0929 21:45:20.714293 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7" path="/var/lib/kubelet/pods/0ec5b6d4-5b91-4fb9-b4c4-0fbfd18b0ca7/volumes" Sep 29 21:45:24 crc kubenswrapper[4911]: I0929 21:45:24.256690 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="16704d0f-ad69-4cc9-890a-77c268d78151" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.105:5671: connect: connection refused" Sep 29 21:45:24 crc kubenswrapper[4911]: I0929 21:45:24.410917 4911 generic.go:334] "Generic (PLEG): container finished" podID="16704d0f-ad69-4cc9-890a-77c268d78151" containerID="69fddeba00ec9a43661c93507c63cebfd0cabbe5003764a2097f0fb5cbe78287" exitCode=0 Sep 29 21:45:24 crc kubenswrapper[4911]: I0929 21:45:24.411244 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"16704d0f-ad69-4cc9-890a-77c268d78151","Type":"ContainerDied","Data":"69fddeba00ec9a43661c93507c63cebfd0cabbe5003764a2097f0fb5cbe78287"} Sep 29 21:45:24 crc kubenswrapper[4911]: I0929 21:45:24.506903 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="54bedb11-6943-4e34-a221-8dbd2cfd5eee" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.106:5671: connect: connection refused" Sep 29 21:45:26 crc kubenswrapper[4911]: I0929 21:45:26.457475 4911 generic.go:334] "Generic (PLEG): container finished" podID="54bedb11-6943-4e34-a221-8dbd2cfd5eee" containerID="4a91ba752e02ad019e74614fab8db0819aae4221643a836bef5f3dd960c39ac3" exitCode=0 Sep 29 21:45:26 crc kubenswrapper[4911]: I0929 21:45:26.457855 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"54bedb11-6943-4e34-a221-8dbd2cfd5eee","Type":"ContainerDied","Data":"4a91ba752e02ad019e74614fab8db0819aae4221643a836bef5f3dd960c39ac3"} Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.097568 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-n47q5"] Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.100091 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.102109 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.122133 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-n47q5"] Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.220588 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmfkt\" (UniqueName: \"kubernetes.io/projected/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-kube-api-access-xmfkt\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.220651 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.220719 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.220744 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.220760 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.220841 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-config\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.220872 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-dns-svc\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.322726 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-n47q5\" 
(UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.322786 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.322833 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.322898 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-config\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.322923 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-dns-svc\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.322951 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmfkt\" (UniqueName: \"kubernetes.io/projected/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-kube-api-access-xmfkt\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.322983 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.323829 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.324000 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-dns-svc\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.324705 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " 
pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.325035 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.325144 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.326236 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-config\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.345482 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmfkt\" (UniqueName: \"kubernetes.io/projected/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-kube-api-access-xmfkt\") pod \"dnsmasq-dns-67b789f86c-n47q5\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:27 crc kubenswrapper[4911]: I0929 21:45:27.435879 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.890112 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.900971 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.994976 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54bedb11-6943-4e34-a221-8dbd2cfd5eee-erlang-cookie-secret\") pod \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995031 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54bedb11-6943-4e34-a221-8dbd2cfd5eee-pod-info\") pod \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995078 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-plugins\") pod \"16704d0f-ad69-4cc9-890a-77c268d78151\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995161 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-config-data\") pod \"16704d0f-ad69-4cc9-890a-77c268d78151\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995191 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/16704d0f-ad69-4cc9-890a-77c268d78151-erlang-cookie-secret\") pod \"16704d0f-ad69-4cc9-890a-77c268d78151\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995226 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/16704d0f-ad69-4cc9-890a-77c268d78151-pod-info\") pod \"16704d0f-ad69-4cc9-890a-77c268d78151\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995284 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-erlang-cookie\") pod \"16704d0f-ad69-4cc9-890a-77c268d78151\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995316 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995353 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-plugins\") pod \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995422 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-server-conf\") pod \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\" (UID: 
\"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995458 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-plugins-conf\") pod \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995481 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-confd\") pod \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995534 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-confd\") pod \"16704d0f-ad69-4cc9-890a-77c268d78151\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995574 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-erlang-cookie\") pod \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995925 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-config-data\") pod \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995955 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cx7z\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-kube-api-access-7cx7z\") pod \"16704d0f-ad69-4cc9-890a-77c268d78151\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.995979 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-plugins-conf\") pod \"16704d0f-ad69-4cc9-890a-77c268d78151\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.996012 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"16704d0f-ad69-4cc9-890a-77c268d78151\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.996008 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "16704d0f-ad69-4cc9-890a-77c268d78151" (UID: "16704d0f-ad69-4cc9-890a-77c268d78151"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.996046 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-server-conf\") pod \"16704d0f-ad69-4cc9-890a-77c268d78151\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.996165 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-tls\") pod \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.996194 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rhbd\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-kube-api-access-9rhbd\") pod \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\" (UID: \"54bedb11-6943-4e34-a221-8dbd2cfd5eee\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.996199 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "54bedb11-6943-4e34-a221-8dbd2cfd5eee" (UID: "54bedb11-6943-4e34-a221-8dbd2cfd5eee"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.996219 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-tls\") pod \"16704d0f-ad69-4cc9-890a-77c268d78151\" (UID: \"16704d0f-ad69-4cc9-890a-77c268d78151\") " Sep 29 21:45:34 crc kubenswrapper[4911]: I0929 21:45:34.997923 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "16704d0f-ad69-4cc9-890a-77c268d78151" (UID: "16704d0f-ad69-4cc9-890a-77c268d78151"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:34.999308 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:34.999338 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:34.999348 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:34.999782 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "54bedb11-6943-4e34-a221-8dbd2cfd5eee" (UID: "54bedb11-6943-4e34-a221-8dbd2cfd5eee"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.002506 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16704d0f-ad69-4cc9-890a-77c268d78151-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "16704d0f-ad69-4cc9-890a-77c268d78151" (UID: "16704d0f-ad69-4cc9-890a-77c268d78151"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.007000 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "54bedb11-6943-4e34-a221-8dbd2cfd5eee" (UID: "54bedb11-6943-4e34-a221-8dbd2cfd5eee"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.007130 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "54bedb11-6943-4e34-a221-8dbd2cfd5eee" (UID: "54bedb11-6943-4e34-a221-8dbd2cfd5eee"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.007512 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "16704d0f-ad69-4cc9-890a-77c268d78151" (UID: "16704d0f-ad69-4cc9-890a-77c268d78151"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.010361 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "16704d0f-ad69-4cc9-890a-77c268d78151" (UID: "16704d0f-ad69-4cc9-890a-77c268d78151"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.010571 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54bedb11-6943-4e34-a221-8dbd2cfd5eee-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "54bedb11-6943-4e34-a221-8dbd2cfd5eee" (UID: "54bedb11-6943-4e34-a221-8dbd2cfd5eee"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.013576 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-kube-api-access-9rhbd" (OuterVolumeSpecName: "kube-api-access-9rhbd") pod "54bedb11-6943-4e34-a221-8dbd2cfd5eee" (UID: "54bedb11-6943-4e34-a221-8dbd2cfd5eee"). InnerVolumeSpecName "kube-api-access-9rhbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.014292 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/54bedb11-6943-4e34-a221-8dbd2cfd5eee-pod-info" (OuterVolumeSpecName: "pod-info") pod "54bedb11-6943-4e34-a221-8dbd2cfd5eee" (UID: "54bedb11-6943-4e34-a221-8dbd2cfd5eee"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.015082 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "16704d0f-ad69-4cc9-890a-77c268d78151" (UID: "16704d0f-ad69-4cc9-890a-77c268d78151"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.015251 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "54bedb11-6943-4e34-a221-8dbd2cfd5eee" (UID: "54bedb11-6943-4e34-a221-8dbd2cfd5eee"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.018527 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/16704d0f-ad69-4cc9-890a-77c268d78151-pod-info" (OuterVolumeSpecName: "pod-info") pod "16704d0f-ad69-4cc9-890a-77c268d78151" (UID: "16704d0f-ad69-4cc9-890a-77c268d78151"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.021190 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-kube-api-access-7cx7z" (OuterVolumeSpecName: "kube-api-access-7cx7z") pod "16704d0f-ad69-4cc9-890a-77c268d78151" (UID: "16704d0f-ad69-4cc9-890a-77c268d78151"). InnerVolumeSpecName "kube-api-access-7cx7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.042234 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-config-data" (OuterVolumeSpecName: "config-data") pod "54bedb11-6943-4e34-a221-8dbd2cfd5eee" (UID: "54bedb11-6943-4e34-a221-8dbd2cfd5eee"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.059408 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-config-data" (OuterVolumeSpecName: "config-data") pod "16704d0f-ad69-4cc9-890a-77c268d78151" (UID: "16704d0f-ad69-4cc9-890a-77c268d78151"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101214 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101246 4911 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/16704d0f-ad69-4cc9-890a-77c268d78151-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101258 4911 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/16704d0f-ad69-4cc9-890a-77c268d78151-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101280 4911 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101299 4911 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101309 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101317 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101328 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cx7z\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-kube-api-access-7cx7z\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101337 4911 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101352 4911 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101363 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101372 4911 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-9rhbd\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-kube-api-access-9rhbd\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101381 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101390 4911 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54bedb11-6943-4e34-a221-8dbd2cfd5eee-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.101397 4911 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54bedb11-6943-4e34-a221-8dbd2cfd5eee-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.116550 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-server-conf" (OuterVolumeSpecName: "server-conf") pod "16704d0f-ad69-4cc9-890a-77c268d78151" (UID: "16704d0f-ad69-4cc9-890a-77c268d78151"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.154710 4911 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.154820 4911 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.160457 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "16704d0f-ad69-4cc9-890a-77c268d78151" (UID: "16704d0f-ad69-4cc9-890a-77c268d78151"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.170624 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-server-conf" (OuterVolumeSpecName: "server-conf") pod "54bedb11-6943-4e34-a221-8dbd2cfd5eee" (UID: "54bedb11-6943-4e34-a221-8dbd2cfd5eee"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.174692 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "54bedb11-6943-4e34-a221-8dbd2cfd5eee" (UID: "54bedb11-6943-4e34-a221-8dbd2cfd5eee"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.203780 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/16704d0f-ad69-4cc9-890a-77c268d78151-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.203834 4911 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.203850 4911 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/16704d0f-ad69-4cc9-890a-77c268d78151-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.203863 4911 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.203881 4911 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54bedb11-6943-4e34-a221-8dbd2cfd5eee-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.203892 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54bedb11-6943-4e34-a221-8dbd2cfd5eee-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:35 crc kubenswrapper[4911]: E0929 21:45:35.553016 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.98:5001/podified-master-centos10/openstack-ceilometer-central:telemetry_latest" Sep 29 21:45:35 crc kubenswrapper[4911]: E0929 21:45:35.553076 4911 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.98:5001/podified-master-centos10/openstack-ceilometer-central:telemetry_latest" Sep 29 21:45:35 crc kubenswrapper[4911]: E0929 21:45:35.553202 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:38.102.83.98:5001/podified-master-centos10/openstack-ceilometer-central:telemetry_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.554917 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"54bedb11-6943-4e34-a221-8dbd2cfd5eee","Type":"ContainerDied","Data":"d44721ddf5f3aee40e36fd60b59bb1b90f8c2c9059f64492a96a44f8c9cabe5c"}
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.554941 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.554968 4911 scope.go:117] "RemoveContainer" containerID="4a91ba752e02ad019e74614fab8db0819aae4221643a836bef5f3dd960c39ac3"
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.560570 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"16704d0f-ad69-4cc9-890a-77c268d78151","Type":"ContainerDied","Data":"3f2069697d4e0595ede69000808f174f941bb76209878f649a8a465d8bdff7eb"}
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.560667 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.599843 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.612373 4911 scope.go:117] "RemoveContainer" containerID="6332677c2f853df112183ac5da0e7e95e8fdbc8790b02d3fe1434e560e32033d" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.633188 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.662164 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.671737 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.681919 4911 scope.go:117] "RemoveContainer" containerID="69fddeba00ec9a43661c93507c63cebfd0cabbe5003764a2097f0fb5cbe78287" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.682040 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 21:45:35 crc kubenswrapper[4911]: E0929 21:45:35.682406 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54bedb11-6943-4e34-a221-8dbd2cfd5eee" containerName="rabbitmq" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.682417 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="54bedb11-6943-4e34-a221-8dbd2cfd5eee" containerName="rabbitmq" Sep 29 21:45:35 crc kubenswrapper[4911]: E0929 21:45:35.682431 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16704d0f-ad69-4cc9-890a-77c268d78151" containerName="rabbitmq" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.682437 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="16704d0f-ad69-4cc9-890a-77c268d78151" containerName="rabbitmq" Sep 29 21:45:35 crc kubenswrapper[4911]: E0929 21:45:35.682454 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54bedb11-6943-4e34-a221-8dbd2cfd5eee" containerName="setup-container" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.682460 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="54bedb11-6943-4e34-a221-8dbd2cfd5eee" containerName="setup-container" Sep 29 21:45:35 crc kubenswrapper[4911]: E0929 21:45:35.682470 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16704d0f-ad69-4cc9-890a-77c268d78151" containerName="setup-container" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.682477 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="16704d0f-ad69-4cc9-890a-77c268d78151" containerName="setup-container" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.682672 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="16704d0f-ad69-4cc9-890a-77c268d78151" containerName="rabbitmq" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.682687 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="54bedb11-6943-4e34-a221-8dbd2cfd5eee" containerName="rabbitmq" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.684461 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.687018 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-99ttz" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.689410 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.689653 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.690993 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.691194 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.691396 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.691541 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.699268 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.712061 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.713765 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.717105 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.717435 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-tlxqf" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.717523 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.717690 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.717721 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.718471 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.718868 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.721967 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.724205 4911 scope.go:117] "RemoveContainer" containerID="337a146ff0ca619afb1efb1546eb8d6c2739a71f7a38410dc78114bda5bfa1ee" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.730313 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-rabbitmq-tls\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.730343 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.730374 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.730433 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.730487 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.730507 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.730557 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xqpw\" (UniqueName: \"kubernetes.io/projected/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-kube-api-access-4xqpw\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.730607 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.730621 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.730638 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.730678 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.832683 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.832725 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52v2n\" (UniqueName: \"kubernetes.io/projected/a4102e69-ec0f-43d2-aaf9-0b760d487420-kube-api-access-52v2n\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.832751 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.832776 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.832853 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.832874 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a4102e69-ec0f-43d2-aaf9-0b760d487420-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.832922 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.832960 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a4102e69-ec0f-43d2-aaf9-0b760d487420-pod-info\") pod \"rabbitmq-server-0\" (UID: 
\"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.832987 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833010 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833033 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a4102e69-ec0f-43d2-aaf9-0b760d487420-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833055 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xqpw\" (UniqueName: \"kubernetes.io/projected/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-kube-api-access-4xqpw\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833072 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a4102e69-ec0f-43d2-aaf9-0b760d487420-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833085 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a4102e69-ec0f-43d2-aaf9-0b760d487420-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833112 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833116 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4102e69-ec0f-43d2-aaf9-0b760d487420-config-data\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833146 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833179 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833205 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a4102e69-ec0f-43d2-aaf9-0b760d487420-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833221 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a4102e69-ec0f-43d2-aaf9-0b760d487420-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833238 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833257 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a4102e69-ec0f-43d2-aaf9-0b760d487420-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0"
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.833901 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.834093 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.834327 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.834331 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0"
volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.835308 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.838593 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.840804 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.844168 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.844255 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.848765 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xqpw\" (UniqueName: \"kubernetes.io/projected/ba53369f-42c7-4fb2-82e2-cf4eaebcedd7-kube-api-access-4xqpw\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.865072 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.935294 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a4102e69-ec0f-43d2-aaf9-0b760d487420-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.935653 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a4102e69-ec0f-43d2-aaf9-0b760d487420-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " 
pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.935705 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4102e69-ec0f-43d2-aaf9-0b760d487420-config-data\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.935753 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a4102e69-ec0f-43d2-aaf9-0b760d487420-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.935775 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a4102e69-ec0f-43d2-aaf9-0b760d487420-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.935823 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a4102e69-ec0f-43d2-aaf9-0b760d487420-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.935864 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52v2n\" (UniqueName: \"kubernetes.io/projected/a4102e69-ec0f-43d2-aaf9-0b760d487420-kube-api-access-52v2n\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.935917 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.935939 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a4102e69-ec0f-43d2-aaf9-0b760d487420-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.936028 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a4102e69-ec0f-43d2-aaf9-0b760d487420-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.936087 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a4102e69-ec0f-43d2-aaf9-0b760d487420-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.937644 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/a4102e69-ec0f-43d2-aaf9-0b760d487420-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.938809 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4102e69-ec0f-43d2-aaf9-0b760d487420-config-data\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.939153 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a4102e69-ec0f-43d2-aaf9-0b760d487420-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.939318 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.939492 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a4102e69-ec0f-43d2-aaf9-0b760d487420-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.940497 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a4102e69-ec0f-43d2-aaf9-0b760d487420-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.940571 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a4102e69-ec0f-43d2-aaf9-0b760d487420-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.941122 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a4102e69-ec0f-43d2-aaf9-0b760d487420-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.942751 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a4102e69-ec0f-43d2-aaf9-0b760d487420-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.943543 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a4102e69-ec0f-43d2-aaf9-0b760d487420-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.966009 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-52v2n\" (UniqueName: \"kubernetes.io/projected/a4102e69-ec0f-43d2-aaf9-0b760d487420-kube-api-access-52v2n\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:35 crc kubenswrapper[4911]: I0929 21:45:35.993250 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"a4102e69-ec0f-43d2-aaf9-0b760d487420\") " pod="openstack/rabbitmq-server-0" Sep 29 21:45:36 crc kubenswrapper[4911]: I0929 21:45:36.018152 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:45:36 crc kubenswrapper[4911]: I0929 21:45:36.031813 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 21:45:36 crc kubenswrapper[4911]: I0929 21:45:36.047855 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-n47q5"] Sep 29 21:45:36 crc kubenswrapper[4911]: W0929 21:45:36.062088 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb47dfd70_23bd_4c0c_badf_0ac99f95f0cd.slice/crio-3580aa71f640cb9c541f78e26ee8e776e88479f28bbfa72cfad46c79149f2c21 WatchSource:0}: Error finding container 3580aa71f640cb9c541f78e26ee8e776e88479f28bbfa72cfad46c79149f2c21: Status 404 returned error can't find the container with id 3580aa71f640cb9c541f78e26ee8e776e88479f28bbfa72cfad46c79149f2c21 Sep 29 21:45:36 crc kubenswrapper[4911]: W0929 21:45:36.563272 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba53369f_42c7_4fb2_82e2_cf4eaebcedd7.slice/crio-9c0fd105f092b7b7ba7bfdccbceea44228341278eb506df1a14bc831076faf97 WatchSource:0}: Error finding container 9c0fd105f092b7b7ba7bfdccbceea44228341278eb506df1a14bc831076faf97: Status 404 returned error can't find the container with id 9c0fd105f092b7b7ba7bfdccbceea44228341278eb506df1a14bc831076faf97 Sep 29 21:45:36 crc kubenswrapper[4911]: I0929 21:45:36.576372 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 21:45:36 crc kubenswrapper[4911]: I0929 21:45:36.577043 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f523c771-a76c-4854-a62f-85e929e1a24b","Type":"ContainerStarted","Data":"522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0"} Sep 29 21:45:36 crc kubenswrapper[4911]: I0929 21:45:36.578879 4911 generic.go:334] "Generic (PLEG): container finished" podID="b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" containerID="7005b0dfeed6ee19254bbfe18e1b5b48576b2b3b900e6199bfc327f573151748" exitCode=0 Sep 29 21:45:36 crc kubenswrapper[4911]: I0929 21:45:36.578927 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-n47q5" event={"ID":"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd","Type":"ContainerDied","Data":"7005b0dfeed6ee19254bbfe18e1b5b48576b2b3b900e6199bfc327f573151748"} Sep 29 21:45:36 crc kubenswrapper[4911]: I0929 21:45:36.578942 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-n47q5" event={"ID":"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd","Type":"ContainerStarted","Data":"3580aa71f640cb9c541f78e26ee8e776e88479f28bbfa72cfad46c79149f2c21"} Sep 
Sep 29 21:45:36 crc kubenswrapper[4911]: I0929 21:45:36.752024 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16704d0f-ad69-4cc9-890a-77c268d78151" path="/var/lib/kubelet/pods/16704d0f-ad69-4cc9-890a-77c268d78151/volumes"
Sep 29 21:45:36 crc kubenswrapper[4911]: I0929 21:45:36.753276 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54bedb11-6943-4e34-a221-8dbd2cfd5eee" path="/var/lib/kubelet/pods/54bedb11-6943-4e34-a221-8dbd2cfd5eee/volumes"
Sep 29 21:45:37 crc kubenswrapper[4911]: I0929 21:45:37.594633 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a4102e69-ec0f-43d2-aaf9-0b760d487420","Type":"ContainerStarted","Data":"e85ad334161926b9d0af6f60984eb85be0cdcc1ec5a3b0286b01ada679b78411"}
Sep 29 21:45:37 crc kubenswrapper[4911]: I0929 21:45:37.595151 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a4102e69-ec0f-43d2-aaf9-0b760d487420","Type":"ContainerStarted","Data":"2f1d8abd96260f0a70ae99b7357435d83533ccf1bd52f87313900cdd128ef26e"}
Sep 29 21:45:37 crc kubenswrapper[4911]: I0929 21:45:37.596337 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f523c771-a76c-4854-a62f-85e929e1a24b","Type":"ContainerStarted","Data":"90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715"}
Sep 29 21:45:37 crc kubenswrapper[4911]: I0929 21:45:37.598549 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7","Type":"ContainerStarted","Data":"a712701ef091deab413354f02a87ee344adf89fbf3c31005b78d9cffad57e3fa"}
Sep 29 21:45:37 crc kubenswrapper[4911]: I0929 21:45:37.598606 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7","Type":"ContainerStarted","Data":"9c0fd105f092b7b7ba7bfdccbceea44228341278eb506df1a14bc831076faf97"}
Sep 29 21:45:37 crc kubenswrapper[4911]: I0929 21:45:37.602212 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-n47q5" event={"ID":"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd","Type":"ContainerStarted","Data":"fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2"}
Sep 29 21:45:37 crc kubenswrapper[4911]: I0929 21:45:37.602326 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67b789f86c-n47q5"
Sep 29 21:45:37 crc kubenswrapper[4911]: I0929 21:45:37.651667 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-n47q5" podStartSLOduration=10.651640876 podStartE2EDuration="10.651640876s" podCreationTimestamp="2025-09-29 21:45:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:45:37.645280948 +0000 UTC m=+1215.622393669" watchObservedRunningTime="2025-09-29 21:45:37.651640876 +0000 UTC m=+1215.628753587"
Sep 29 21:45:38 crc kubenswrapper[4911]: E0929 21:45:38.963026 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="f523c771-a76c-4854-a62f-85e929e1a24b"
podUID="f523c771-a76c-4854-a62f-85e929e1a24b" Sep 29 21:45:39 crc kubenswrapper[4911]: I0929 21:45:39.256714 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="16704d0f-ad69-4cc9-890a-77c268d78151" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.105:5671: i/o timeout" Sep 29 21:45:39 crc kubenswrapper[4911]: I0929 21:45:39.502212 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="54bedb11-6943-4e34-a221-8dbd2cfd5eee" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.106:5671: i/o timeout" Sep 29 21:45:39 crc kubenswrapper[4911]: I0929 21:45:39.627839 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f523c771-a76c-4854-a62f-85e929e1a24b","Type":"ContainerStarted","Data":"e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656"} Sep 29 21:45:39 crc kubenswrapper[4911]: I0929 21:45:39.628632 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 21:45:39 crc kubenswrapper[4911]: E0929 21:45:39.630631 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.98:5001/podified-master-centos10/openstack-ceilometer-central:telemetry_latest\\\"\"" pod="openstack/ceilometer-0" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" Sep 29 21:45:40 crc kubenswrapper[4911]: E0929 21:45:40.643517 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.98:5001/podified-master-centos10/openstack-ceilometer-central:telemetry_latest\\\"\"" pod="openstack/ceilometer-0" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.440051 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.510917 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-j2scw"] Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.511149 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" podUID="037d778b-c252-409b-9b11-0bd0911d7d4f" containerName="dnsmasq-dns" containerID="cri-o://c7917b7026a466b130b34fd319d1181f8256315131c83c1d784faeccd99a5b8d" gracePeriod=10 Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.669766 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-msfsz"] Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.671387 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.677202 4911 generic.go:334] "Generic (PLEG): container finished" podID="037d778b-c252-409b-9b11-0bd0911d7d4f" containerID="c7917b7026a466b130b34fd319d1181f8256315131c83c1d784faeccd99a5b8d" exitCode=0 Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.677252 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" event={"ID":"037d778b-c252-409b-9b11-0bd0911d7d4f","Type":"ContainerDied","Data":"c7917b7026a466b130b34fd319d1181f8256315131c83c1d784faeccd99a5b8d"} Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.701418 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-msfsz"] Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.751221 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-config\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.751290 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clk6h\" (UniqueName: \"kubernetes.io/projected/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-kube-api-access-clk6h\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.751401 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.751470 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.751497 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.751560 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.751620 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" 
(UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.853195 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.853319 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-config\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.853349 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clk6h\" (UniqueName: \"kubernetes.io/projected/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-kube-api-access-clk6h\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.853384 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.853411 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.853428 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.853468 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.854263 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.854550 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " 
pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.855602 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-config\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.855606 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.855661 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.855727 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:42 crc kubenswrapper[4911]: I0929 21:45:42.873382 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clk6h\" (UniqueName: \"kubernetes.io/projected/0b286d05-e9e4-4862-bbf0-6f4f658a56e0-kube-api-access-clk6h\") pod \"dnsmasq-dns-cb6ffcf87-msfsz\" (UID: \"0b286d05-e9e4-4862-bbf0-6f4f658a56e0\") " pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.045835 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.089870 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.259843 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-dns-svc\") pod \"037d778b-c252-409b-9b11-0bd0911d7d4f\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.259893 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-ovsdbserver-nb\") pod \"037d778b-c252-409b-9b11-0bd0911d7d4f\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.260124 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-dns-swift-storage-0\") pod \"037d778b-c252-409b-9b11-0bd0911d7d4f\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.260214 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-config\") pod \"037d778b-c252-409b-9b11-0bd0911d7d4f\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.260249 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-ovsdbserver-sb\") pod \"037d778b-c252-409b-9b11-0bd0911d7d4f\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.260274 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdlz6\" (UniqueName: \"kubernetes.io/projected/037d778b-c252-409b-9b11-0bd0911d7d4f-kube-api-access-bdlz6\") pod \"037d778b-c252-409b-9b11-0bd0911d7d4f\" (UID: \"037d778b-c252-409b-9b11-0bd0911d7d4f\") " Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.268567 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/037d778b-c252-409b-9b11-0bd0911d7d4f-kube-api-access-bdlz6" (OuterVolumeSpecName: "kube-api-access-bdlz6") pod "037d778b-c252-409b-9b11-0bd0911d7d4f" (UID: "037d778b-c252-409b-9b11-0bd0911d7d4f"). InnerVolumeSpecName "kube-api-access-bdlz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.313601 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "037d778b-c252-409b-9b11-0bd0911d7d4f" (UID: "037d778b-c252-409b-9b11-0bd0911d7d4f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.317988 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "037d778b-c252-409b-9b11-0bd0911d7d4f" (UID: "037d778b-c252-409b-9b11-0bd0911d7d4f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.323980 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "037d778b-c252-409b-9b11-0bd0911d7d4f" (UID: "037d778b-c252-409b-9b11-0bd0911d7d4f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.325653 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-config" (OuterVolumeSpecName: "config") pod "037d778b-c252-409b-9b11-0bd0911d7d4f" (UID: "037d778b-c252-409b-9b11-0bd0911d7d4f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.325933 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "037d778b-c252-409b-9b11-0bd0911d7d4f" (UID: "037d778b-c252-409b-9b11-0bd0911d7d4f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.362392 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.362613 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.362741 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.362832 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.362884 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/037d778b-c252-409b-9b11-0bd0911d7d4f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.362932 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdlz6\" (UniqueName: \"kubernetes.io/projected/037d778b-c252-409b-9b11-0bd0911d7d4f-kube-api-access-bdlz6\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:43 crc kubenswrapper[4911]: W0929 21:45:43.557236 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b286d05_e9e4_4862_bbf0_6f4f658a56e0.slice/crio-6ddbc9edbc568937224e425b2dddfbe1b09263b87a401ed3a08e4aa96cc0cb37 WatchSource:0}: Error finding container 6ddbc9edbc568937224e425b2dddfbe1b09263b87a401ed3a08e4aa96cc0cb37: Status 404 returned error can't find the container with id 6ddbc9edbc568937224e425b2dddfbe1b09263b87a401ed3a08e4aa96cc0cb37 Sep 29 21:45:43 crc 
kubenswrapper[4911]: I0929 21:45:43.558403 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-msfsz"] Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.691660 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" event={"ID":"0b286d05-e9e4-4862-bbf0-6f4f658a56e0","Type":"ContainerStarted","Data":"6ddbc9edbc568937224e425b2dddfbe1b09263b87a401ed3a08e4aa96cc0cb37"} Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.694188 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" event={"ID":"037d778b-c252-409b-9b11-0bd0911d7d4f","Type":"ContainerDied","Data":"06942fd91df31dba7c9922582f94c62b85abbe26286a096b2561da6ac1c3524f"} Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.694259 4911 scope.go:117] "RemoveContainer" containerID="c7917b7026a466b130b34fd319d1181f8256315131c83c1d784faeccd99a5b8d" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.694412 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-j2scw" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.766298 4911 scope.go:117] "RemoveContainer" containerID="32003fac499ee3408b751a04f57a209711e9161bd58b8fee319d9fc3e6ec835c" Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.793449 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-j2scw"] Sep 29 21:45:43 crc kubenswrapper[4911]: I0929 21:45:43.802543 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-j2scw"] Sep 29 21:45:44 crc kubenswrapper[4911]: I0929 21:45:44.703984 4911 generic.go:334] "Generic (PLEG): container finished" podID="0b286d05-e9e4-4862-bbf0-6f4f658a56e0" containerID="51bf53b50b80bcbc72f2be4375ee5a60d45d2bb3492513a668024113bc85ec45" exitCode=0 Sep 29 21:45:44 crc kubenswrapper[4911]: I0929 21:45:44.713896 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="037d778b-c252-409b-9b11-0bd0911d7d4f" path="/var/lib/kubelet/pods/037d778b-c252-409b-9b11-0bd0911d7d4f/volumes" Sep 29 21:45:44 crc kubenswrapper[4911]: I0929 21:45:44.714897 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" event={"ID":"0b286d05-e9e4-4862-bbf0-6f4f658a56e0","Type":"ContainerDied","Data":"51bf53b50b80bcbc72f2be4375ee5a60d45d2bb3492513a668024113bc85ec45"} Sep 29 21:45:45 crc kubenswrapper[4911]: I0929 21:45:45.721652 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" event={"ID":"0b286d05-e9e4-4862-bbf0-6f4f658a56e0","Type":"ContainerStarted","Data":"321422fb5b03763e8389a836769768bb11499bcc2cdfd9de922822300989c70f"} Sep 29 21:45:45 crc kubenswrapper[4911]: I0929 21:45:45.763396 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" podStartSLOduration=3.763375755 podStartE2EDuration="3.763375755s" podCreationTimestamp="2025-09-29 21:45:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:45:45.753965513 +0000 UTC m=+1223.731078264" watchObservedRunningTime="2025-09-29 21:45:45.763375755 +0000 UTC m=+1223.740488446" Sep 29 21:45:46 crc kubenswrapper[4911]: I0929 21:45:46.731221 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:49 crc 
kubenswrapper[4911]: I0929 21:45:49.750611 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 21:45:50 crc kubenswrapper[4911]: I0929 21:45:50.780059 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f523c771-a76c-4854-a62f-85e929e1a24b","Type":"ContainerStarted","Data":"0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952"} Sep 29 21:45:50 crc kubenswrapper[4911]: I0929 21:45:50.818299 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.22273825 podStartE2EDuration="31.818285902s" podCreationTimestamp="2025-09-29 21:45:19 +0000 UTC" firstStartedPulling="2025-09-29 21:45:20.226299653 +0000 UTC m=+1198.203412324" lastFinishedPulling="2025-09-29 21:45:49.821847275 +0000 UTC m=+1227.798959976" observedRunningTime="2025-09-29 21:45:50.814732152 +0000 UTC m=+1228.791844833" watchObservedRunningTime="2025-09-29 21:45:50.818285902 +0000 UTC m=+1228.795398573" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.048376 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cb6ffcf87-msfsz" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.117458 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-n47q5"] Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.117739 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-n47q5" podUID="b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" containerName="dnsmasq-dns" containerID="cri-o://fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2" gracePeriod=10 Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.581397 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.686572 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-ovsdbserver-nb\") pod \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.686637 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-config\") pod \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.686687 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-dns-svc\") pod \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.686731 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-dns-swift-storage-0\") pod \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.686824 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmfkt\" (UniqueName: \"kubernetes.io/projected/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-kube-api-access-xmfkt\") pod \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.686920 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-ovsdbserver-sb\") pod \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.686975 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-openstack-edpm-ipam\") pod \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\" (UID: \"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd\") " Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.708680 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-kube-api-access-xmfkt" (OuterVolumeSpecName: "kube-api-access-xmfkt") pod "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" (UID: "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd"). InnerVolumeSpecName "kube-api-access-xmfkt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.747164 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" (UID: "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.750274 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" (UID: "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.750890 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" (UID: "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.751489 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" (UID: "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.756268 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" (UID: "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.767752 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-config" (OuterVolumeSpecName: "config") pod "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" (UID: "b47dfd70-23bd-4c0c-badf-0ac99f95f0cd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.789405 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.789442 4911 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.789452 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.789462 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-config\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.789470 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.789478 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.789490 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmfkt\" (UniqueName: \"kubernetes.io/projected/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd-kube-api-access-xmfkt\") on node \"crc\" DevicePath \"\"" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.811869 4911 generic.go:334] "Generic (PLEG): container finished" podID="b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" containerID="fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2" exitCode=0 Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.811929 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-n47q5" event={"ID":"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd","Type":"ContainerDied","Data":"fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2"} Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.811940 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-n47q5" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.812135 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-n47q5" event={"ID":"b47dfd70-23bd-4c0c-badf-0ac99f95f0cd","Type":"ContainerDied","Data":"3580aa71f640cb9c541f78e26ee8e776e88479f28bbfa72cfad46c79149f2c21"} Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.812266 4911 scope.go:117] "RemoveContainer" containerID="fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.842014 4911 scope.go:117] "RemoveContainer" containerID="7005b0dfeed6ee19254bbfe18e1b5b48576b2b3b900e6199bfc327f573151748" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.850828 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-n47q5"] Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.863225 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-n47q5"] Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.876873 4911 scope.go:117] "RemoveContainer" containerID="fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2" Sep 29 21:45:53 crc kubenswrapper[4911]: E0929 21:45:53.877288 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2\": container with ID starting with fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2 not found: ID does not exist" containerID="fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.877318 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2"} err="failed to get container status \"fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2\": rpc error: code = NotFound desc = could not find container \"fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2\": container with ID starting with fbe1e393472fbad04d3c5afcd04c98f4b204470241778c5d4f134a305613c0b2 not found: ID does not exist" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.877340 4911 scope.go:117] "RemoveContainer" containerID="7005b0dfeed6ee19254bbfe18e1b5b48576b2b3b900e6199bfc327f573151748" Sep 29 21:45:53 crc kubenswrapper[4911]: E0929 21:45:53.877657 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7005b0dfeed6ee19254bbfe18e1b5b48576b2b3b900e6199bfc327f573151748\": container with ID starting with 7005b0dfeed6ee19254bbfe18e1b5b48576b2b3b900e6199bfc327f573151748 not found: ID does not exist" containerID="7005b0dfeed6ee19254bbfe18e1b5b48576b2b3b900e6199bfc327f573151748" Sep 29 21:45:53 crc kubenswrapper[4911]: I0929 21:45:53.877731 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7005b0dfeed6ee19254bbfe18e1b5b48576b2b3b900e6199bfc327f573151748"} err="failed to get container status \"7005b0dfeed6ee19254bbfe18e1b5b48576b2b3b900e6199bfc327f573151748\": rpc error: code = NotFound desc = could not find container \"7005b0dfeed6ee19254bbfe18e1b5b48576b2b3b900e6199bfc327f573151748\": container with ID starting with 7005b0dfeed6ee19254bbfe18e1b5b48576b2b3b900e6199bfc327f573151748 not found: ID does not exist" Sep 29 
21:45:54 crc kubenswrapper[4911]: I0929 21:45:54.717921 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" path="/var/lib/kubelet/pods/b47dfd70-23bd-4c0c-badf-0ac99f95f0cd/volumes" Sep 29 21:45:55 crc kubenswrapper[4911]: I0929 21:45:55.211364 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:45:55 crc kubenswrapper[4911]: I0929 21:45:55.211419 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.849862 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn"] Sep 29 21:46:01 crc kubenswrapper[4911]: E0929 21:46:01.850837 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" containerName="init" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.850852 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" containerName="init" Sep 29 21:46:01 crc kubenswrapper[4911]: E0929 21:46:01.850875 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="037d778b-c252-409b-9b11-0bd0911d7d4f" containerName="dnsmasq-dns" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.850884 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="037d778b-c252-409b-9b11-0bd0911d7d4f" containerName="dnsmasq-dns" Sep 29 21:46:01 crc kubenswrapper[4911]: E0929 21:46:01.850897 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" containerName="dnsmasq-dns" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.850905 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" containerName="dnsmasq-dns" Sep 29 21:46:01 crc kubenswrapper[4911]: E0929 21:46:01.850926 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="037d778b-c252-409b-9b11-0bd0911d7d4f" containerName="init" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.850933 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="037d778b-c252-409b-9b11-0bd0911d7d4f" containerName="init" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.851156 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="037d778b-c252-409b-9b11-0bd0911d7d4f" containerName="dnsmasq-dns" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.851185 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b47dfd70-23bd-4c0c-badf-0ac99f95f0cd" containerName="dnsmasq-dns" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.851892 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.855759 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.856449 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.856533 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.856774 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.872024 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn"] Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.954904 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.954960 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cgtt\" (UniqueName: \"kubernetes.io/projected/44ed0dd9-96df-4d55-b788-5e82df516063-kube-api-access-4cgtt\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.955005 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:01 crc kubenswrapper[4911]: I0929 21:46:01.955621 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:02 crc kubenswrapper[4911]: I0929 21:46:02.057693 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:02 crc kubenswrapper[4911]: I0929 21:46:02.057915 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-inventory\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:02 crc kubenswrapper[4911]: I0929 21:46:02.057984 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:02 crc kubenswrapper[4911]: I0929 21:46:02.058019 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cgtt\" (UniqueName: \"kubernetes.io/projected/44ed0dd9-96df-4d55-b788-5e82df516063-kube-api-access-4cgtt\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:02 crc kubenswrapper[4911]: I0929 21:46:02.065033 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:02 crc kubenswrapper[4911]: I0929 21:46:02.065377 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:02 crc kubenswrapper[4911]: I0929 21:46:02.065504 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:02 crc kubenswrapper[4911]: I0929 21:46:02.082498 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cgtt\" (UniqueName: \"kubernetes.io/projected/44ed0dd9-96df-4d55-b788-5e82df516063-kube-api-access-4cgtt\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:02 crc kubenswrapper[4911]: I0929 21:46:02.187427 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:02 crc kubenswrapper[4911]: I0929 21:46:02.813676 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn"] Sep 29 21:46:02 crc kubenswrapper[4911]: I0929 21:46:02.918021 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" event={"ID":"44ed0dd9-96df-4d55-b788-5e82df516063","Type":"ContainerStarted","Data":"da00c52f92fcd2367c8b65e9003a61364f5dc767733d2a7eb9872f919d276066"} Sep 29 21:46:07 crc kubenswrapper[4911]: I0929 21:46:07.971355 4911 generic.go:334] "Generic (PLEG): container finished" podID="ba53369f-42c7-4fb2-82e2-cf4eaebcedd7" containerID="a712701ef091deab413354f02a87ee344adf89fbf3c31005b78d9cffad57e3fa" exitCode=0 Sep 29 21:46:07 crc kubenswrapper[4911]: I0929 21:46:07.971424 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7","Type":"ContainerDied","Data":"a712701ef091deab413354f02a87ee344adf89fbf3c31005b78d9cffad57e3fa"} Sep 29 21:46:07 crc kubenswrapper[4911]: I0929 21:46:07.974957 4911 generic.go:334] "Generic (PLEG): container finished" podID="a4102e69-ec0f-43d2-aaf9-0b760d487420" containerID="e85ad334161926b9d0af6f60984eb85be0cdcc1ec5a3b0286b01ada679b78411" exitCode=0 Sep 29 21:46:07 crc kubenswrapper[4911]: I0929 21:46:07.974999 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a4102e69-ec0f-43d2-aaf9-0b760d487420","Type":"ContainerDied","Data":"e85ad334161926b9d0af6f60984eb85be0cdcc1ec5a3b0286b01ada679b78411"} Sep 29 21:46:11 crc kubenswrapper[4911]: I0929 21:46:11.024895 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" event={"ID":"44ed0dd9-96df-4d55-b788-5e82df516063","Type":"ContainerStarted","Data":"7e587c3b6181a679fa0cda53e0845d54739fa46903ce5d67e5b10db6bfe8b161"} Sep 29 21:46:11 crc kubenswrapper[4911]: I0929 21:46:11.027638 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a4102e69-ec0f-43d2-aaf9-0b760d487420","Type":"ContainerStarted","Data":"e482f04c632e3959bd2b649ee12a825dc41bf725dd45841e85a29568beeef780"} Sep 29 21:46:11 crc kubenswrapper[4911]: I0929 21:46:11.027872 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 21:46:11 crc kubenswrapper[4911]: I0929 21:46:11.032696 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ba53369f-42c7-4fb2-82e2-cf4eaebcedd7","Type":"ContainerStarted","Data":"159b80087b8dc23f6f291855225d6d98fad6d35a92c4a9c2f90f7bccbc35c753"} Sep 29 21:46:11 crc kubenswrapper[4911]: I0929 21:46:11.032984 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:46:11 crc kubenswrapper[4911]: I0929 21:46:11.049288 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" podStartSLOduration=2.132741704 podStartE2EDuration="10.049260899s" podCreationTimestamp="2025-09-29 21:46:01 +0000 UTC" firstStartedPulling="2025-09-29 21:46:02.819350629 +0000 UTC m=+1240.796463310" lastFinishedPulling="2025-09-29 21:46:10.735869814 +0000 UTC m=+1248.712982505" observedRunningTime="2025-09-29 
21:46:11.041891369 +0000 UTC m=+1249.019004110" watchObservedRunningTime="2025-09-29 21:46:11.049260899 +0000 UTC m=+1249.026373580" Sep 29 21:46:11 crc kubenswrapper[4911]: I0929 21:46:11.072569 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.072550874 podStartE2EDuration="36.072550874s" podCreationTimestamp="2025-09-29 21:45:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:46:11.065767853 +0000 UTC m=+1249.042880534" watchObservedRunningTime="2025-09-29 21:46:11.072550874 +0000 UTC m=+1249.049663545" Sep 29 21:46:23 crc kubenswrapper[4911]: I0929 21:46:23.163805 4911 generic.go:334] "Generic (PLEG): container finished" podID="44ed0dd9-96df-4d55-b788-5e82df516063" containerID="7e587c3b6181a679fa0cda53e0845d54739fa46903ce5d67e5b10db6bfe8b161" exitCode=0 Sep 29 21:46:23 crc kubenswrapper[4911]: I0929 21:46:23.163932 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" event={"ID":"44ed0dd9-96df-4d55-b788-5e82df516063","Type":"ContainerDied","Data":"7e587c3b6181a679fa0cda53e0845d54739fa46903ce5d67e5b10db6bfe8b161"} Sep 29 21:46:23 crc kubenswrapper[4911]: I0929 21:46:23.186912 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=48.186892738 podStartE2EDuration="48.186892738s" podCreationTimestamp="2025-09-29 21:45:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 21:46:11.099835733 +0000 UTC m=+1249.076948414" watchObservedRunningTime="2025-09-29 21:46:23.186892738 +0000 UTC m=+1261.164005409" Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.629016 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.718294 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-repo-setup-combined-ca-bundle\") pod \"44ed0dd9-96df-4d55-b788-5e82df516063\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.718393 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-ssh-key\") pod \"44ed0dd9-96df-4d55-b788-5e82df516063\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.718426 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4cgtt\" (UniqueName: \"kubernetes.io/projected/44ed0dd9-96df-4d55-b788-5e82df516063-kube-api-access-4cgtt\") pod \"44ed0dd9-96df-4d55-b788-5e82df516063\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.718549 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-inventory\") pod \"44ed0dd9-96df-4d55-b788-5e82df516063\" (UID: \"44ed0dd9-96df-4d55-b788-5e82df516063\") " Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.726461 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "44ed0dd9-96df-4d55-b788-5e82df516063" (UID: "44ed0dd9-96df-4d55-b788-5e82df516063"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.727619 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44ed0dd9-96df-4d55-b788-5e82df516063-kube-api-access-4cgtt" (OuterVolumeSpecName: "kube-api-access-4cgtt") pod "44ed0dd9-96df-4d55-b788-5e82df516063" (UID: "44ed0dd9-96df-4d55-b788-5e82df516063"). InnerVolumeSpecName "kube-api-access-4cgtt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.754543 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "44ed0dd9-96df-4d55-b788-5e82df516063" (UID: "44ed0dd9-96df-4d55-b788-5e82df516063"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.759250 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-inventory" (OuterVolumeSpecName: "inventory") pod "44ed0dd9-96df-4d55-b788-5e82df516063" (UID: "44ed0dd9-96df-4d55-b788-5e82df516063"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.821022 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.821065 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4cgtt\" (UniqueName: \"kubernetes.io/projected/44ed0dd9-96df-4d55-b788-5e82df516063-kube-api-access-4cgtt\") on node \"crc\" DevicePath \"\"" Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.821078 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:46:24 crc kubenswrapper[4911]: I0929 21:46:24.821091 4911 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44ed0dd9-96df-4d55-b788-5e82df516063-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.190107 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" event={"ID":"44ed0dd9-96df-4d55-b788-5e82df516063","Type":"ContainerDied","Data":"da00c52f92fcd2367c8b65e9003a61364f5dc767733d2a7eb9872f919d276066"} Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.190152 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da00c52f92fcd2367c8b65e9003a61364f5dc767733d2a7eb9872f919d276066" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.190174 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.212987 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.213043 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.289088 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp"] Sep 29 21:46:25 crc kubenswrapper[4911]: E0929 21:46:25.289514 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44ed0dd9-96df-4d55-b788-5e82df516063" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.289536 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="44ed0dd9-96df-4d55-b788-5e82df516063" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.289757 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="44ed0dd9-96df-4d55-b788-5e82df516063" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.290507 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.293412 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.293580 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.293775 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.297471 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.307241 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp"] Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.331256 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/170521df-782e-45b7-9ae9-389fff67083e-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-hp9bp\" (UID: \"170521df-782e-45b7-9ae9-389fff67083e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.331346 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbxqb\" (UniqueName: \"kubernetes.io/projected/170521df-782e-45b7-9ae9-389fff67083e-kube-api-access-rbxqb\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-hp9bp\" (UID: \"170521df-782e-45b7-9ae9-389fff67083e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.331442 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/170521df-782e-45b7-9ae9-389fff67083e-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-hp9bp\" (UID: \"170521df-782e-45b7-9ae9-389fff67083e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.432662 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbxqb\" (UniqueName: \"kubernetes.io/projected/170521df-782e-45b7-9ae9-389fff67083e-kube-api-access-rbxqb\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-hp9bp\" (UID: \"170521df-782e-45b7-9ae9-389fff67083e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.432771 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/170521df-782e-45b7-9ae9-389fff67083e-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-hp9bp\" (UID: \"170521df-782e-45b7-9ae9-389fff67083e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.432853 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/170521df-782e-45b7-9ae9-389fff67083e-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-hp9bp\" (UID: \"170521df-782e-45b7-9ae9-389fff67083e\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.438939 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/170521df-782e-45b7-9ae9-389fff67083e-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-hp9bp\" (UID: \"170521df-782e-45b7-9ae9-389fff67083e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.440936 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/170521df-782e-45b7-9ae9-389fff67083e-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-hp9bp\" (UID: \"170521df-782e-45b7-9ae9-389fff67083e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:25 crc kubenswrapper[4911]: E0929 21:46:25.444849 4911 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44ed0dd9_96df_4d55_b788_5e82df516063.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44ed0dd9_96df_4d55_b788_5e82df516063.slice/crio-da00c52f92fcd2367c8b65e9003a61364f5dc767733d2a7eb9872f919d276066\": RecentStats: unable to find data in memory cache]" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.452581 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbxqb\" (UniqueName: \"kubernetes.io/projected/170521df-782e-45b7-9ae9-389fff67083e-kube-api-access-rbxqb\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-hp9bp\" (UID: \"170521df-782e-45b7-9ae9-389fff67083e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:25 crc kubenswrapper[4911]: I0929 21:46:25.613951 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:26 crc kubenswrapper[4911]: I0929 21:46:26.005612 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp"] Sep 29 21:46:26 crc kubenswrapper[4911]: W0929 21:46:26.009332 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod170521df_782e_45b7_9ae9_389fff67083e.slice/crio-56ea049d612b6e9015a6981aa8bedad3b66099b766b30c5c80274fd996b5e4e2 WatchSource:0}: Error finding container 56ea049d612b6e9015a6981aa8bedad3b66099b766b30c5c80274fd996b5e4e2: Status 404 returned error can't find the container with id 56ea049d612b6e9015a6981aa8bedad3b66099b766b30c5c80274fd996b5e4e2 Sep 29 21:46:26 crc kubenswrapper[4911]: I0929 21:46:26.024000 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 21:46:26 crc kubenswrapper[4911]: I0929 21:46:26.039067 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 21:46:26 crc kubenswrapper[4911]: I0929 21:46:26.201532 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" event={"ID":"170521df-782e-45b7-9ae9-389fff67083e","Type":"ContainerStarted","Data":"56ea049d612b6e9015a6981aa8bedad3b66099b766b30c5c80274fd996b5e4e2"} Sep 29 21:46:27 crc kubenswrapper[4911]: I0929 21:46:27.210862 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" event={"ID":"170521df-782e-45b7-9ae9-389fff67083e","Type":"ContainerStarted","Data":"6f8bbef0acec5213c638f2fb8cd4ebc6011e9806ebc4ec3e4bdc35132c21d2b6"} Sep 29 21:46:27 crc kubenswrapper[4911]: I0929 21:46:27.229111 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" podStartSLOduration=1.673674165 podStartE2EDuration="2.229088974s" podCreationTimestamp="2025-09-29 21:46:25 +0000 UTC" firstStartedPulling="2025-09-29 21:46:26.012676269 +0000 UTC m=+1263.989788940" lastFinishedPulling="2025-09-29 21:46:26.568091078 +0000 UTC m=+1264.545203749" observedRunningTime="2025-09-29 21:46:27.227984589 +0000 UTC m=+1265.205097270" watchObservedRunningTime="2025-09-29 21:46:27.229088974 +0000 UTC m=+1265.206201655" Sep 29 21:46:30 crc kubenswrapper[4911]: I0929 21:46:30.240996 4911 generic.go:334] "Generic (PLEG): container finished" podID="170521df-782e-45b7-9ae9-389fff67083e" containerID="6f8bbef0acec5213c638f2fb8cd4ebc6011e9806ebc4ec3e4bdc35132c21d2b6" exitCode=0 Sep 29 21:46:30 crc kubenswrapper[4911]: I0929 21:46:30.241065 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" event={"ID":"170521df-782e-45b7-9ae9-389fff67083e","Type":"ContainerDied","Data":"6f8bbef0acec5213c638f2fb8cd4ebc6011e9806ebc4ec3e4bdc35132c21d2b6"} Sep 29 21:46:31 crc kubenswrapper[4911]: I0929 21:46:31.706279 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:31 crc kubenswrapper[4911]: I0929 21:46:31.768151 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbxqb\" (UniqueName: \"kubernetes.io/projected/170521df-782e-45b7-9ae9-389fff67083e-kube-api-access-rbxqb\") pod \"170521df-782e-45b7-9ae9-389fff67083e\" (UID: \"170521df-782e-45b7-9ae9-389fff67083e\") " Sep 29 21:46:31 crc kubenswrapper[4911]: I0929 21:46:31.768362 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/170521df-782e-45b7-9ae9-389fff67083e-inventory\") pod \"170521df-782e-45b7-9ae9-389fff67083e\" (UID: \"170521df-782e-45b7-9ae9-389fff67083e\") " Sep 29 21:46:31 crc kubenswrapper[4911]: I0929 21:46:31.768399 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/170521df-782e-45b7-9ae9-389fff67083e-ssh-key\") pod \"170521df-782e-45b7-9ae9-389fff67083e\" (UID: \"170521df-782e-45b7-9ae9-389fff67083e\") " Sep 29 21:46:31 crc kubenswrapper[4911]: I0929 21:46:31.775661 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/170521df-782e-45b7-9ae9-389fff67083e-kube-api-access-rbxqb" (OuterVolumeSpecName: "kube-api-access-rbxqb") pod "170521df-782e-45b7-9ae9-389fff67083e" (UID: "170521df-782e-45b7-9ae9-389fff67083e"). InnerVolumeSpecName "kube-api-access-rbxqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:46:31 crc kubenswrapper[4911]: I0929 21:46:31.803572 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/170521df-782e-45b7-9ae9-389fff67083e-inventory" (OuterVolumeSpecName: "inventory") pod "170521df-782e-45b7-9ae9-389fff67083e" (UID: "170521df-782e-45b7-9ae9-389fff67083e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:46:31 crc kubenswrapper[4911]: I0929 21:46:31.813497 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/170521df-782e-45b7-9ae9-389fff67083e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "170521df-782e-45b7-9ae9-389fff67083e" (UID: "170521df-782e-45b7-9ae9-389fff67083e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:46:31 crc kubenswrapper[4911]: I0929 21:46:31.870882 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/170521df-782e-45b7-9ae9-389fff67083e-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:46:31 crc kubenswrapper[4911]: I0929 21:46:31.870915 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/170521df-782e-45b7-9ae9-389fff67083e-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:46:31 crc kubenswrapper[4911]: I0929 21:46:31.870926 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbxqb\" (UniqueName: \"kubernetes.io/projected/170521df-782e-45b7-9ae9-389fff67083e-kube-api-access-rbxqb\") on node \"crc\" DevicePath \"\"" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.265715 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" event={"ID":"170521df-782e-45b7-9ae9-389fff67083e","Type":"ContainerDied","Data":"56ea049d612b6e9015a6981aa8bedad3b66099b766b30c5c80274fd996b5e4e2"} Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.265780 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56ea049d612b6e9015a6981aa8bedad3b66099b766b30c5c80274fd996b5e4e2" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.265907 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-hp9bp" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.348098 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt"] Sep 29 21:46:32 crc kubenswrapper[4911]: E0929 21:46:32.348964 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="170521df-782e-45b7-9ae9-389fff67083e" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.348998 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="170521df-782e-45b7-9ae9-389fff67083e" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.349384 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="170521df-782e-45b7-9ae9-389fff67083e" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.350450 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.358204 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.358347 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.358799 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.359154 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.363548 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt"] Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.380830 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.380999 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.381060 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.381117 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2654\" (UniqueName: \"kubernetes.io/projected/8a132bff-9655-4b4f-9574-ff04307fa051-kube-api-access-s2654\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.483089 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.483207 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt\" (UID: 
\"8a132bff-9655-4b4f-9574-ff04307fa051\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.483247 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.483284 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2654\" (UniqueName: \"kubernetes.io/projected/8a132bff-9655-4b4f-9574-ff04307fa051-kube-api-access-s2654\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.487655 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.494123 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.494345 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.501914 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2654\" (UniqueName: \"kubernetes.io/projected/8a132bff-9655-4b4f-9574-ff04307fa051-kube-api-access-s2654\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:32 crc kubenswrapper[4911]: I0929 21:46:32.696721 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:46:33 crc kubenswrapper[4911]: I0929 21:46:33.281713 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt"] Sep 29 21:46:33 crc kubenswrapper[4911]: W0929 21:46:33.294691 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a132bff_9655_4b4f_9574_ff04307fa051.slice/crio-16b257e5b493815c1f0d7b0e7362e9b57e2d0898983d9f6a677015defd652133 WatchSource:0}: Error finding container 16b257e5b493815c1f0d7b0e7362e9b57e2d0898983d9f6a677015defd652133: Status 404 returned error can't find the container with id 16b257e5b493815c1f0d7b0e7362e9b57e2d0898983d9f6a677015defd652133 Sep 29 21:46:34 crc kubenswrapper[4911]: I0929 21:46:34.295425 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" event={"ID":"8a132bff-9655-4b4f-9574-ff04307fa051","Type":"ContainerStarted","Data":"5b472f6db0f659db4389ef2aaa33c0753b8063c8013cda09a8c7696b3b0457a3"} Sep 29 21:46:34 crc kubenswrapper[4911]: I0929 21:46:34.295726 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" event={"ID":"8a132bff-9655-4b4f-9574-ff04307fa051","Type":"ContainerStarted","Data":"16b257e5b493815c1f0d7b0e7362e9b57e2d0898983d9f6a677015defd652133"} Sep 29 21:46:34 crc kubenswrapper[4911]: I0929 21:46:34.327566 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" podStartSLOduration=1.8039781160000001 podStartE2EDuration="2.327542454s" podCreationTimestamp="2025-09-29 21:46:32 +0000 UTC" firstStartedPulling="2025-09-29 21:46:33.298093689 +0000 UTC m=+1271.275206360" lastFinishedPulling="2025-09-29 21:46:33.821657987 +0000 UTC m=+1271.798770698" observedRunningTime="2025-09-29 21:46:34.314561189 +0000 UTC m=+1272.291673900" watchObservedRunningTime="2025-09-29 21:46:34.327542454 +0000 UTC m=+1272.304655165" Sep 29 21:46:55 crc kubenswrapper[4911]: I0929 21:46:55.211510 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:46:55 crc kubenswrapper[4911]: I0929 21:46:55.212974 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:46:55 crc kubenswrapper[4911]: I0929 21:46:55.213067 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:46:55 crc kubenswrapper[4911]: I0929 21:46:55.213981 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"134746612eaa8a9a73112d2bf779a2a7d4f9c664598301b1e0b9cd02784dad89"} pod="openshift-machine-config-operator/machine-config-daemon-w647f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 
21:46:55 crc kubenswrapper[4911]: I0929 21:46:55.214050 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" containerID="cri-o://134746612eaa8a9a73112d2bf779a2a7d4f9c664598301b1e0b9cd02784dad89" gracePeriod=600 Sep 29 21:46:55 crc kubenswrapper[4911]: I0929 21:46:55.525233 4911 generic.go:334] "Generic (PLEG): container finished" podID="50640abc-40db-4390-82d1-f3cfc76da71c" containerID="134746612eaa8a9a73112d2bf779a2a7d4f9c664598301b1e0b9cd02784dad89" exitCode=0 Sep 29 21:46:55 crc kubenswrapper[4911]: I0929 21:46:55.525295 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerDied","Data":"134746612eaa8a9a73112d2bf779a2a7d4f9c664598301b1e0b9cd02784dad89"} Sep 29 21:46:55 crc kubenswrapper[4911]: I0929 21:46:55.525566 4911 scope.go:117] "RemoveContainer" containerID="4a9c99b6ceab26bcf54375dc4957b5762f55e899af1d807a48454b472085e569" Sep 29 21:46:56 crc kubenswrapper[4911]: I0929 21:46:56.538068 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a"} Sep 29 21:47:35 crc kubenswrapper[4911]: I0929 21:47:35.675689 4911 scope.go:117] "RemoveContainer" containerID="584e8c4c181197a4a1582a896356294deda7ef8be81123dfee5064f20966e9ff" Sep 29 21:48:35 crc kubenswrapper[4911]: I0929 21:48:35.771286 4911 scope.go:117] "RemoveContainer" containerID="01c89657e24e897503e29264bfec753bae6aed2ea2b1e9e35b6f646c4b04171b" Sep 29 21:48:50 crc kubenswrapper[4911]: I0929 21:48:50.727812 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4pk96"] Sep 29 21:48:50 crc kubenswrapper[4911]: I0929 21:48:50.732117 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:48:50 crc kubenswrapper[4911]: I0929 21:48:50.741616 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4pk96"] Sep 29 21:48:50 crc kubenswrapper[4911]: I0929 21:48:50.837312 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f467d373-0ce8-45c4-bec1-bbbd30d43e54-catalog-content\") pod \"certified-operators-4pk96\" (UID: \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\") " pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:48:50 crc kubenswrapper[4911]: I0929 21:48:50.837750 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jb9gc\" (UniqueName: \"kubernetes.io/projected/f467d373-0ce8-45c4-bec1-bbbd30d43e54-kube-api-access-jb9gc\") pod \"certified-operators-4pk96\" (UID: \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\") " pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:48:50 crc kubenswrapper[4911]: I0929 21:48:50.837916 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f467d373-0ce8-45c4-bec1-bbbd30d43e54-utilities\") pod \"certified-operators-4pk96\" (UID: \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\") " pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:48:50 crc kubenswrapper[4911]: I0929 21:48:50.940037 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f467d373-0ce8-45c4-bec1-bbbd30d43e54-catalog-content\") pod \"certified-operators-4pk96\" (UID: \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\") " pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:48:50 crc kubenswrapper[4911]: I0929 21:48:50.940496 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jb9gc\" (UniqueName: \"kubernetes.io/projected/f467d373-0ce8-45c4-bec1-bbbd30d43e54-kube-api-access-jb9gc\") pod \"certified-operators-4pk96\" (UID: \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\") " pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:48:50 crc kubenswrapper[4911]: I0929 21:48:50.940552 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f467d373-0ce8-45c4-bec1-bbbd30d43e54-utilities\") pod \"certified-operators-4pk96\" (UID: \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\") " pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:48:50 crc kubenswrapper[4911]: I0929 21:48:50.941498 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f467d373-0ce8-45c4-bec1-bbbd30d43e54-utilities\") pod \"certified-operators-4pk96\" (UID: \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\") " pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:48:50 crc kubenswrapper[4911]: I0929 21:48:50.941696 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f467d373-0ce8-45c4-bec1-bbbd30d43e54-catalog-content\") pod \"certified-operators-4pk96\" (UID: \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\") " pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:48:50 crc kubenswrapper[4911]: I0929 21:48:50.979590 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jb9gc\" (UniqueName: \"kubernetes.io/projected/f467d373-0ce8-45c4-bec1-bbbd30d43e54-kube-api-access-jb9gc\") pod \"certified-operators-4pk96\" (UID: \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\") " pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:48:51 crc kubenswrapper[4911]: I0929 21:48:51.064744 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:48:51 crc kubenswrapper[4911]: I0929 21:48:51.524078 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4pk96"] Sep 29 21:48:51 crc kubenswrapper[4911]: I0929 21:48:51.881182 4911 generic.go:334] "Generic (PLEG): container finished" podID="f467d373-0ce8-45c4-bec1-bbbd30d43e54" containerID="3a854044b2a61019a8022a875443a586293fc33e68f12e90c1b640031465a9ed" exitCode=0 Sep 29 21:48:51 crc kubenswrapper[4911]: I0929 21:48:51.881306 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pk96" event={"ID":"f467d373-0ce8-45c4-bec1-bbbd30d43e54","Type":"ContainerDied","Data":"3a854044b2a61019a8022a875443a586293fc33e68f12e90c1b640031465a9ed"} Sep 29 21:48:51 crc kubenswrapper[4911]: I0929 21:48:51.881534 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pk96" event={"ID":"f467d373-0ce8-45c4-bec1-bbbd30d43e54","Type":"ContainerStarted","Data":"8c8fb376805efaa993016db46bcda180c0502342f991e8ce038878f1703266ec"} Sep 29 21:48:52 crc kubenswrapper[4911]: I0929 21:48:52.892354 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pk96" event={"ID":"f467d373-0ce8-45c4-bec1-bbbd30d43e54","Type":"ContainerStarted","Data":"4e1c1cf0a04f199201159b2c5a90bec7bc451528f08dc7d7d67deeb5d3e815d0"} Sep 29 21:48:53 crc kubenswrapper[4911]: I0929 21:48:53.921423 4911 generic.go:334] "Generic (PLEG): container finished" podID="f467d373-0ce8-45c4-bec1-bbbd30d43e54" containerID="4e1c1cf0a04f199201159b2c5a90bec7bc451528f08dc7d7d67deeb5d3e815d0" exitCode=0 Sep 29 21:48:53 crc kubenswrapper[4911]: I0929 21:48:53.921871 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pk96" event={"ID":"f467d373-0ce8-45c4-bec1-bbbd30d43e54","Type":"ContainerDied","Data":"4e1c1cf0a04f199201159b2c5a90bec7bc451528f08dc7d7d67deeb5d3e815d0"} Sep 29 21:48:54 crc kubenswrapper[4911]: I0929 21:48:54.933348 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pk96" event={"ID":"f467d373-0ce8-45c4-bec1-bbbd30d43e54","Type":"ContainerStarted","Data":"a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673"} Sep 29 21:48:54 crc kubenswrapper[4911]: I0929 21:48:54.957807 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4pk96" podStartSLOduration=2.475172363 podStartE2EDuration="4.957772431s" podCreationTimestamp="2025-09-29 21:48:50 +0000 UTC" firstStartedPulling="2025-09-29 21:48:51.883033238 +0000 UTC m=+1409.860145909" lastFinishedPulling="2025-09-29 21:48:54.365633296 +0000 UTC m=+1412.342745977" observedRunningTime="2025-09-29 21:48:54.95291331 +0000 UTC m=+1412.930026011" watchObservedRunningTime="2025-09-29 21:48:54.957772431 +0000 UTC m=+1412.934885102" Sep 29 21:48:55 crc kubenswrapper[4911]: I0929 21:48:55.211207 4911 patch_prober.go:28] interesting 
pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:48:55 crc kubenswrapper[4911]: I0929 21:48:55.211274 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:49:01 crc kubenswrapper[4911]: I0929 21:49:01.064975 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:49:01 crc kubenswrapper[4911]: I0929 21:49:01.065687 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:49:01 crc kubenswrapper[4911]: I0929 21:49:01.128301 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:49:02 crc kubenswrapper[4911]: I0929 21:49:02.074654 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:49:02 crc kubenswrapper[4911]: I0929 21:49:02.130666 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4pk96"] Sep 29 21:49:04 crc kubenswrapper[4911]: I0929 21:49:04.019249 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4pk96" podUID="f467d373-0ce8-45c4-bec1-bbbd30d43e54" containerName="registry-server" containerID="cri-o://a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673" gracePeriod=2 Sep 29 21:49:04 crc kubenswrapper[4911]: I0929 21:49:04.589998 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:49:04 crc kubenswrapper[4911]: I0929 21:49:04.715144 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jb9gc\" (UniqueName: \"kubernetes.io/projected/f467d373-0ce8-45c4-bec1-bbbd30d43e54-kube-api-access-jb9gc\") pod \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\" (UID: \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\") " Sep 29 21:49:04 crc kubenswrapper[4911]: I0929 21:49:04.715338 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f467d373-0ce8-45c4-bec1-bbbd30d43e54-utilities\") pod \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\" (UID: \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\") " Sep 29 21:49:04 crc kubenswrapper[4911]: I0929 21:49:04.715438 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f467d373-0ce8-45c4-bec1-bbbd30d43e54-catalog-content\") pod \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\" (UID: \"f467d373-0ce8-45c4-bec1-bbbd30d43e54\") " Sep 29 21:49:04 crc kubenswrapper[4911]: I0929 21:49:04.717024 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f467d373-0ce8-45c4-bec1-bbbd30d43e54-utilities" (OuterVolumeSpecName: "utilities") pod "f467d373-0ce8-45c4-bec1-bbbd30d43e54" (UID: "f467d373-0ce8-45c4-bec1-bbbd30d43e54"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:49:04 crc kubenswrapper[4911]: I0929 21:49:04.721943 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f467d373-0ce8-45c4-bec1-bbbd30d43e54-kube-api-access-jb9gc" (OuterVolumeSpecName: "kube-api-access-jb9gc") pod "f467d373-0ce8-45c4-bec1-bbbd30d43e54" (UID: "f467d373-0ce8-45c4-bec1-bbbd30d43e54"). InnerVolumeSpecName "kube-api-access-jb9gc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:49:04 crc kubenswrapper[4911]: I0929 21:49:04.759883 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f467d373-0ce8-45c4-bec1-bbbd30d43e54-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f467d373-0ce8-45c4-bec1-bbbd30d43e54" (UID: "f467d373-0ce8-45c4-bec1-bbbd30d43e54"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:49:04 crc kubenswrapper[4911]: I0929 21:49:04.817642 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f467d373-0ce8-45c4-bec1-bbbd30d43e54-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:49:04 crc kubenswrapper[4911]: I0929 21:49:04.817695 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f467d373-0ce8-45c4-bec1-bbbd30d43e54-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:49:04 crc kubenswrapper[4911]: I0929 21:49:04.817711 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jb9gc\" (UniqueName: \"kubernetes.io/projected/f467d373-0ce8-45c4-bec1-bbbd30d43e54-kube-api-access-jb9gc\") on node \"crc\" DevicePath \"\"" Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.033541 4911 generic.go:334] "Generic (PLEG): container finished" podID="f467d373-0ce8-45c4-bec1-bbbd30d43e54" containerID="a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673" exitCode=0 Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.033608 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pk96" event={"ID":"f467d373-0ce8-45c4-bec1-bbbd30d43e54","Type":"ContainerDied","Data":"a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673"} Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.033620 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4pk96" Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.033664 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4pk96" event={"ID":"f467d373-0ce8-45c4-bec1-bbbd30d43e54","Type":"ContainerDied","Data":"8c8fb376805efaa993016db46bcda180c0502342f991e8ce038878f1703266ec"} Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.033705 4911 scope.go:117] "RemoveContainer" containerID="a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673" Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.064727 4911 scope.go:117] "RemoveContainer" containerID="4e1c1cf0a04f199201159b2c5a90bec7bc451528f08dc7d7d67deeb5d3e815d0" Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.088353 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4pk96"] Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.098733 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4pk96"] Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.113008 4911 scope.go:117] "RemoveContainer" containerID="3a854044b2a61019a8022a875443a586293fc33e68f12e90c1b640031465a9ed" Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.141761 4911 scope.go:117] "RemoveContainer" containerID="a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673" Sep 29 21:49:05 crc kubenswrapper[4911]: E0929 21:49:05.142213 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673\": container with ID starting with a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673 not found: ID does not exist" containerID="a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673" Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.142250 
4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673"} err="failed to get container status \"a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673\": rpc error: code = NotFound desc = could not find container \"a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673\": container with ID starting with a09b64d509d61a5ac010d1e076178862b303b6003c6db6b0462db1c34b793673 not found: ID does not exist" Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.142275 4911 scope.go:117] "RemoveContainer" containerID="4e1c1cf0a04f199201159b2c5a90bec7bc451528f08dc7d7d67deeb5d3e815d0" Sep 29 21:49:05 crc kubenswrapper[4911]: E0929 21:49:05.142630 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e1c1cf0a04f199201159b2c5a90bec7bc451528f08dc7d7d67deeb5d3e815d0\": container with ID starting with 4e1c1cf0a04f199201159b2c5a90bec7bc451528f08dc7d7d67deeb5d3e815d0 not found: ID does not exist" containerID="4e1c1cf0a04f199201159b2c5a90bec7bc451528f08dc7d7d67deeb5d3e815d0" Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.142655 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e1c1cf0a04f199201159b2c5a90bec7bc451528f08dc7d7d67deeb5d3e815d0"} err="failed to get container status \"4e1c1cf0a04f199201159b2c5a90bec7bc451528f08dc7d7d67deeb5d3e815d0\": rpc error: code = NotFound desc = could not find container \"4e1c1cf0a04f199201159b2c5a90bec7bc451528f08dc7d7d67deeb5d3e815d0\": container with ID starting with 4e1c1cf0a04f199201159b2c5a90bec7bc451528f08dc7d7d67deeb5d3e815d0 not found: ID does not exist" Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.142670 4911 scope.go:117] "RemoveContainer" containerID="3a854044b2a61019a8022a875443a586293fc33e68f12e90c1b640031465a9ed" Sep 29 21:49:05 crc kubenswrapper[4911]: E0929 21:49:05.143046 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a854044b2a61019a8022a875443a586293fc33e68f12e90c1b640031465a9ed\": container with ID starting with 3a854044b2a61019a8022a875443a586293fc33e68f12e90c1b640031465a9ed not found: ID does not exist" containerID="3a854044b2a61019a8022a875443a586293fc33e68f12e90c1b640031465a9ed" Sep 29 21:49:05 crc kubenswrapper[4911]: I0929 21:49:05.143134 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a854044b2a61019a8022a875443a586293fc33e68f12e90c1b640031465a9ed"} err="failed to get container status \"3a854044b2a61019a8022a875443a586293fc33e68f12e90c1b640031465a9ed\": rpc error: code = NotFound desc = could not find container \"3a854044b2a61019a8022a875443a586293fc33e68f12e90c1b640031465a9ed\": container with ID starting with 3a854044b2a61019a8022a875443a586293fc33e68f12e90c1b640031465a9ed not found: ID does not exist" Sep 29 21:49:06 crc kubenswrapper[4911]: I0929 21:49:06.715964 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f467d373-0ce8-45c4-bec1-bbbd30d43e54" path="/var/lib/kubelet/pods/f467d373-0ce8-45c4-bec1-bbbd30d43e54/volumes" Sep 29 21:49:25 crc kubenswrapper[4911]: I0929 21:49:25.210957 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:49:25 crc kubenswrapper[4911]: I0929 21:49:25.211551 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:49:52 crc kubenswrapper[4911]: I0929 21:49:52.524947 4911 generic.go:334] "Generic (PLEG): container finished" podID="8a132bff-9655-4b4f-9574-ff04307fa051" containerID="5b472f6db0f659db4389ef2aaa33c0753b8063c8013cda09a8c7696b3b0457a3" exitCode=0 Sep 29 21:49:52 crc kubenswrapper[4911]: I0929 21:49:52.525191 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" event={"ID":"8a132bff-9655-4b4f-9574-ff04307fa051","Type":"ContainerDied","Data":"5b472f6db0f659db4389ef2aaa33c0753b8063c8013cda09a8c7696b3b0457a3"} Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.085362 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.221380 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2654\" (UniqueName: \"kubernetes.io/projected/8a132bff-9655-4b4f-9574-ff04307fa051-kube-api-access-s2654\") pod \"8a132bff-9655-4b4f-9574-ff04307fa051\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.221565 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-ssh-key\") pod \"8a132bff-9655-4b4f-9574-ff04307fa051\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.221615 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-bootstrap-combined-ca-bundle\") pod \"8a132bff-9655-4b4f-9574-ff04307fa051\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.221690 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-inventory\") pod \"8a132bff-9655-4b4f-9574-ff04307fa051\" (UID: \"8a132bff-9655-4b4f-9574-ff04307fa051\") " Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.227325 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a132bff-9655-4b4f-9574-ff04307fa051-kube-api-access-s2654" (OuterVolumeSpecName: "kube-api-access-s2654") pod "8a132bff-9655-4b4f-9574-ff04307fa051" (UID: "8a132bff-9655-4b4f-9574-ff04307fa051"). InnerVolumeSpecName "kube-api-access-s2654". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.228917 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "8a132bff-9655-4b4f-9574-ff04307fa051" (UID: "8a132bff-9655-4b4f-9574-ff04307fa051"). 
InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.265005 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-inventory" (OuterVolumeSpecName: "inventory") pod "8a132bff-9655-4b4f-9574-ff04307fa051" (UID: "8a132bff-9655-4b4f-9574-ff04307fa051"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.275360 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8a132bff-9655-4b4f-9574-ff04307fa051" (UID: "8a132bff-9655-4b4f-9574-ff04307fa051"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.324864 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2654\" (UniqueName: \"kubernetes.io/projected/8a132bff-9655-4b4f-9574-ff04307fa051-kube-api-access-s2654\") on node \"crc\" DevicePath \"\"" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.324910 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.324929 4911 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.324949 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a132bff-9655-4b4f-9574-ff04307fa051-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.552323 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" event={"ID":"8a132bff-9655-4b4f-9574-ff04307fa051","Type":"ContainerDied","Data":"16b257e5b493815c1f0d7b0e7362e9b57e2d0898983d9f6a677015defd652133"} Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.552372 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16b257e5b493815c1f0d7b0e7362e9b57e2d0898983d9f6a677015defd652133" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.552441 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.670954 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz"] Sep 29 21:49:54 crc kubenswrapper[4911]: E0929 21:49:54.671894 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a132bff-9655-4b4f-9574-ff04307fa051" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.671939 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a132bff-9655-4b4f-9574-ff04307fa051" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 21:49:54 crc kubenswrapper[4911]: E0929 21:49:54.671984 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f467d373-0ce8-45c4-bec1-bbbd30d43e54" containerName="registry-server" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.672002 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f467d373-0ce8-45c4-bec1-bbbd30d43e54" containerName="registry-server" Sep 29 21:49:54 crc kubenswrapper[4911]: E0929 21:49:54.672051 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f467d373-0ce8-45c4-bec1-bbbd30d43e54" containerName="extract-content" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.672070 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f467d373-0ce8-45c4-bec1-bbbd30d43e54" containerName="extract-content" Sep 29 21:49:54 crc kubenswrapper[4911]: E0929 21:49:54.672132 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f467d373-0ce8-45c4-bec1-bbbd30d43e54" containerName="extract-utilities" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.672150 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f467d373-0ce8-45c4-bec1-bbbd30d43e54" containerName="extract-utilities" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.672660 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a132bff-9655-4b4f-9574-ff04307fa051" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.672729 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f467d373-0ce8-45c4-bec1-bbbd30d43e54" containerName="registry-server" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.673829 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.676521 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.676827 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.677242 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.678529 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.687201 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz"] Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.732261 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz\" (UID: \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.732374 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz\" (UID: \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.732451 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v82z\" (UniqueName: \"kubernetes.io/projected/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-kube-api-access-8v82z\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz\" (UID: \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.834583 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz\" (UID: \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.834737 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v82z\" (UniqueName: \"kubernetes.io/projected/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-kube-api-access-8v82z\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz\" (UID: \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.834907 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-inventory\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz\" (UID: \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.839399 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz\" (UID: \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.839877 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz\" (UID: \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:49:54 crc kubenswrapper[4911]: I0929 21:49:54.853710 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8v82z\" (UniqueName: \"kubernetes.io/projected/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-kube-api-access-8v82z\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz\" (UID: \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.003684 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.211052 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.211427 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.211488 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.212458 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a"} pod="openshift-machine-config-operator/machine-config-daemon-w647f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.212551 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" containerID="cri-o://d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" gracePeriod=600 Sep 29 21:49:55 crc kubenswrapper[4911]: E0929 21:49:55.359712 4911 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.411106 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz"] Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.427671 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.568784 4911 generic.go:334] "Generic (PLEG): container finished" podID="50640abc-40db-4390-82d1-f3cfc76da71c" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" exitCode=0 Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.568906 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerDied","Data":"d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a"} Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.569325 4911 scope.go:117] "RemoveContainer" containerID="134746612eaa8a9a73112d2bf779a2a7d4f9c664598301b1e0b9cd02784dad89" Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.570333 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:49:55 crc kubenswrapper[4911]: E0929 21:49:55.570754 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:49:55 crc kubenswrapper[4911]: I0929 21:49:55.571240 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" event={"ID":"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99","Type":"ContainerStarted","Data":"adefe1758d42583e837efde1e605f31368fc8f5252418021b8d036887cc23a5c"} Sep 29 21:49:56 crc kubenswrapper[4911]: I0929 21:49:56.587719 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" event={"ID":"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99","Type":"ContainerStarted","Data":"9b7528a40eb2e50577b5a5ba6e9f8fd35524b121553aed61d09a3fa3c0b6ad6e"} Sep 29 21:49:56 crc kubenswrapper[4911]: I0929 21:49:56.615069 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" podStartSLOduration=2.13211925 podStartE2EDuration="2.61503875s" podCreationTimestamp="2025-09-29 21:49:54 +0000 UTC" firstStartedPulling="2025-09-29 21:49:55.427363133 +0000 UTC m=+1473.404475824" lastFinishedPulling="2025-09-29 21:49:55.910282653 +0000 UTC m=+1473.887395324" observedRunningTime="2025-09-29 21:49:56.608070222 +0000 UTC m=+1474.585182973" watchObservedRunningTime="2025-09-29 21:49:56.61503875 +0000 UTC 
m=+1474.592151461" Sep 29 21:50:07 crc kubenswrapper[4911]: I0929 21:50:07.702018 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:50:07 crc kubenswrapper[4911]: E0929 21:50:07.703143 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:50:18 crc kubenswrapper[4911]: I0929 21:50:18.702113 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:50:18 crc kubenswrapper[4911]: E0929 21:50:18.703147 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:50:32 crc kubenswrapper[4911]: I0929 21:50:32.710230 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:50:32 crc kubenswrapper[4911]: E0929 21:50:32.711281 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:50:35 crc kubenswrapper[4911]: I0929 21:50:35.898200 4911 scope.go:117] "RemoveContainer" containerID="84c9b8aa455c4b9d4dec7ff584f98682a4936de9f034bc182a3098fe1e82adac" Sep 29 21:50:47 crc kubenswrapper[4911]: I0929 21:50:47.700683 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:50:47 crc kubenswrapper[4911]: E0929 21:50:47.701496 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:50:56 crc kubenswrapper[4911]: I0929 21:50:56.042649 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-cfk2q"] Sep 29 21:50:56 crc kubenswrapper[4911]: I0929 21:50:56.050729 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-cfk2q"] Sep 29 21:50:56 crc kubenswrapper[4911]: I0929 21:50:56.714581 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="372a6b13-1d3f-4a0b-9210-4f5b669d486a" path="/var/lib/kubelet/pods/372a6b13-1d3f-4a0b-9210-4f5b669d486a/volumes" Sep 29 21:51:01 crc kubenswrapper[4911]: I0929 21:51:01.701554 4911 scope.go:117] "RemoveContainer" 
containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:51:01 crc kubenswrapper[4911]: E0929 21:51:01.702415 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:51:02 crc kubenswrapper[4911]: I0929 21:51:02.037616 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-nq2n4"] Sep 29 21:51:02 crc kubenswrapper[4911]: I0929 21:51:02.048679 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-876nd"] Sep 29 21:51:02 crc kubenswrapper[4911]: I0929 21:51:02.057655 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-nq2n4"] Sep 29 21:51:02 crc kubenswrapper[4911]: I0929 21:51:02.069308 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-876nd"] Sep 29 21:51:02 crc kubenswrapper[4911]: I0929 21:51:02.715857 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ac967ca-130d-4139-bb7e-57bbd1542f24" path="/var/lib/kubelet/pods/7ac967ca-130d-4139-bb7e-57bbd1542f24/volumes" Sep 29 21:51:02 crc kubenswrapper[4911]: I0929 21:51:02.716563 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4773910-e3b9-4552-bb42-45f748b11e65" path="/var/lib/kubelet/pods/d4773910-e3b9-4552-bb42-45f748b11e65/volumes" Sep 29 21:51:08 crc kubenswrapper[4911]: I0929 21:51:08.038295 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-9075-account-create-b7jml"] Sep 29 21:51:08 crc kubenswrapper[4911]: I0929 21:51:08.050180 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-9075-account-create-b7jml"] Sep 29 21:51:08 crc kubenswrapper[4911]: I0929 21:51:08.716328 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9" path="/var/lib/kubelet/pods/9d246d68-02be-4ee1-b0c8-f7d3a41dc1b9/volumes" Sep 29 21:51:11 crc kubenswrapper[4911]: I0929 21:51:11.041776 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-33c5-account-create-shdxk"] Sep 29 21:51:11 crc kubenswrapper[4911]: I0929 21:51:11.049514 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-33c5-account-create-shdxk"] Sep 29 21:51:12 crc kubenswrapper[4911]: I0929 21:51:12.047485 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-4bbc-account-create-xdw6c"] Sep 29 21:51:12 crc kubenswrapper[4911]: I0929 21:51:12.060197 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-4bbc-account-create-xdw6c"] Sep 29 21:51:12 crc kubenswrapper[4911]: I0929 21:51:12.714172 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9759766a-e408-4d42-b84d-8acc6a3de4ee" path="/var/lib/kubelet/pods/9759766a-e408-4d42-b84d-8acc6a3de4ee/volumes" Sep 29 21:51:12 crc kubenswrapper[4911]: I0929 21:51:12.714916 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4bbd41e-cd3c-4d2e-aa8d-f1e474171118" path="/var/lib/kubelet/pods/e4bbd41e-cd3c-4d2e-aa8d-f1e474171118/volumes" Sep 29 21:51:13 crc kubenswrapper[4911]: I0929 21:51:13.701530 4911 
scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:51:13 crc kubenswrapper[4911]: E0929 21:51:13.702135 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:51:27 crc kubenswrapper[4911]: I0929 21:51:27.701649 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:51:27 crc kubenswrapper[4911]: E0929 21:51:27.702536 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:51:33 crc kubenswrapper[4911]: I0929 21:51:33.033566 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-qs6d4"] Sep 29 21:51:33 crc kubenswrapper[4911]: I0929 21:51:33.045646 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-652pw"] Sep 29 21:51:33 crc kubenswrapper[4911]: I0929 21:51:33.055017 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-zvhtr"] Sep 29 21:51:33 crc kubenswrapper[4911]: I0929 21:51:33.064782 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-652pw"] Sep 29 21:51:33 crc kubenswrapper[4911]: I0929 21:51:33.073597 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-zvhtr"] Sep 29 21:51:33 crc kubenswrapper[4911]: I0929 21:51:33.084115 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-qs6d4"] Sep 29 21:51:34 crc kubenswrapper[4911]: I0929 21:51:34.717377 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15d8f947-4b2f-4605-a19a-2908a833d854" path="/var/lib/kubelet/pods/15d8f947-4b2f-4605-a19a-2908a833d854/volumes" Sep 29 21:51:34 crc kubenswrapper[4911]: I0929 21:51:34.718691 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bae9375-72d0-4d80-a85b-00e594f08ec5" path="/var/lib/kubelet/pods/5bae9375-72d0-4d80-a85b-00e594f08ec5/volumes" Sep 29 21:51:34 crc kubenswrapper[4911]: I0929 21:51:34.719875 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4176d75-f7e0-4327-97cc-f1d89925650f" path="/var/lib/kubelet/pods/c4176d75-f7e0-4327-97cc-f1d89925650f/volumes" Sep 29 21:51:36 crc kubenswrapper[4911]: I0929 21:51:36.008275 4911 scope.go:117] "RemoveContainer" containerID="0e18a6afa3fe010b0c3d20173b764f4d2e215d032365b1fc5fc9d1b342a63e15" Sep 29 21:51:36 crc kubenswrapper[4911]: I0929 21:51:36.056910 4911 scope.go:117] "RemoveContainer" containerID="25ffa1d06c887d7b7a36eb2c184d1b9b41051db863020671fd23f778ad04e12e" Sep 29 21:51:36 crc kubenswrapper[4911]: I0929 21:51:36.101337 4911 scope.go:117] "RemoveContainer" containerID="0f73f203cc8f44a43c149655119a453cfcbd51d7e091efa9966818376cc936d0" Sep 29 21:51:36 crc 
kubenswrapper[4911]: I0929 21:51:36.128493 4911 scope.go:117] "RemoveContainer" containerID="378795debcf63ff3b42f8b80f1ae7e232a330f411c1ccff346293a475fb76741" Sep 29 21:51:36 crc kubenswrapper[4911]: I0929 21:51:36.169993 4911 scope.go:117] "RemoveContainer" containerID="184725e6f5a973a9f3a15441519c0ea6d0a20973970d107a5c3a299e42dfd573" Sep 29 21:51:36 crc kubenswrapper[4911]: I0929 21:51:36.209584 4911 scope.go:117] "RemoveContainer" containerID="b63faa3e3e537bb1eaa6a56fa565b1e4d107c246b5dab88a7cd4375eafb3270c" Sep 29 21:51:36 crc kubenswrapper[4911]: I0929 21:51:36.316089 4911 scope.go:117] "RemoveContainer" containerID="ee1bcd6779a9cc37261fd73aff389d679acb273ed48d8724fb8a3d7428dffff6" Sep 29 21:51:36 crc kubenswrapper[4911]: I0929 21:51:36.337512 4911 scope.go:117] "RemoveContainer" containerID="ba219af358822d0e7bb0b79315aaafe1e25c191120f72f2a4bc3832aaa6e68b8" Sep 29 21:51:36 crc kubenswrapper[4911]: I0929 21:51:36.355577 4911 scope.go:117] "RemoveContainer" containerID="5e9a9bd7f96624db09f9d237d6cc521a52fba8449274fa40eac2e92c7a4ff51b" Sep 29 21:51:36 crc kubenswrapper[4911]: I0929 21:51:36.733013 4911 generic.go:334] "Generic (PLEG): container finished" podID="a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99" containerID="9b7528a40eb2e50577b5a5ba6e9f8fd35524b121553aed61d09a3fa3c0b6ad6e" exitCode=0 Sep 29 21:51:36 crc kubenswrapper[4911]: I0929 21:51:36.733069 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" event={"ID":"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99","Type":"ContainerDied","Data":"9b7528a40eb2e50577b5a5ba6e9f8fd35524b121553aed61d09a3fa3c0b6ad6e"} Sep 29 21:51:37 crc kubenswrapper[4911]: I0929 21:51:37.046734 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-w6kfd"] Sep 29 21:51:37 crc kubenswrapper[4911]: I0929 21:51:37.060206 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-w6kfd"] Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.033097 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-wfp9c"] Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.040421 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-wfp9c"] Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.183118 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.293208 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-ssh-key\") pod \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\" (UID: \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\") " Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.293267 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8v82z\" (UniqueName: \"kubernetes.io/projected/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-kube-api-access-8v82z\") pod \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\" (UID: \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\") " Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.293372 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-inventory\") pod \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\" (UID: \"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99\") " Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.299979 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-kube-api-access-8v82z" (OuterVolumeSpecName: "kube-api-access-8v82z") pod "a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99" (UID: "a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99"). InnerVolumeSpecName "kube-api-access-8v82z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.320668 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-inventory" (OuterVolumeSpecName: "inventory") pod "a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99" (UID: "a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.320686 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99" (UID: "a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.395871 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.395931 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.395974 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8v82z\" (UniqueName: \"kubernetes.io/projected/a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99-kube-api-access-8v82z\") on node \"crc\" DevicePath \"\"" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.726443 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e8211a0-b5a5-4dd2-8b18-6616202ebe45" path="/var/lib/kubelet/pods/3e8211a0-b5a5-4dd2-8b18-6616202ebe45/volumes" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.728083 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a94e08fd-5765-4631-8e54-ec22daab0ca1" path="/var/lib/kubelet/pods/a94e08fd-5765-4631-8e54-ec22daab0ca1/volumes" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.767065 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" event={"ID":"a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99","Type":"ContainerDied","Data":"adefe1758d42583e837efde1e605f31368fc8f5252418021b8d036887cc23a5c"} Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.767135 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adefe1758d42583e837efde1e605f31368fc8f5252418021b8d036887cc23a5c" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.767164 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.849207 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5"] Sep 29 21:51:38 crc kubenswrapper[4911]: E0929 21:51:38.849718 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.849746 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.849978 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.850812 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.854249 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.854249 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.854474 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.854624 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:51:38 crc kubenswrapper[4911]: I0929 21:51:38.862753 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5"] Sep 29 21:51:39 crc kubenswrapper[4911]: I0929 21:51:39.009285 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5\" (UID: \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:51:39 crc kubenswrapper[4911]: I0929 21:51:39.009359 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glzfr\" (UniqueName: \"kubernetes.io/projected/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-kube-api-access-glzfr\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5\" (UID: \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:51:39 crc kubenswrapper[4911]: I0929 21:51:39.009403 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5\" (UID: \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:51:39 crc kubenswrapper[4911]: I0929 21:51:39.111823 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glzfr\" (UniqueName: \"kubernetes.io/projected/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-kube-api-access-glzfr\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5\" (UID: \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:51:39 crc kubenswrapper[4911]: I0929 21:51:39.111975 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5\" (UID: \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:51:39 crc kubenswrapper[4911]: I0929 21:51:39.112500 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-ssh-key\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5\" (UID: \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:51:39 crc kubenswrapper[4911]: I0929 21:51:39.117361 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5\" (UID: \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:51:39 crc kubenswrapper[4911]: I0929 21:51:39.118150 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5\" (UID: \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:51:39 crc kubenswrapper[4911]: I0929 21:51:39.141843 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glzfr\" (UniqueName: \"kubernetes.io/projected/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-kube-api-access-glzfr\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5\" (UID: \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:51:39 crc kubenswrapper[4911]: I0929 21:51:39.171141 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:51:39 crc kubenswrapper[4911]: I0929 21:51:39.760225 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5"] Sep 29 21:51:39 crc kubenswrapper[4911]: I0929 21:51:39.779760 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" event={"ID":"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4","Type":"ContainerStarted","Data":"71ea6b33b71b84b9c440e03ed564056b884cc962e8bedcd8e964693a7638e02c"} Sep 29 21:51:40 crc kubenswrapper[4911]: I0929 21:51:40.792977 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" event={"ID":"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4","Type":"ContainerStarted","Data":"22072e4f32a147cdc34e3ac48e55696cf3523cefc374d4c8873d35e90adb7d5a"} Sep 29 21:51:40 crc kubenswrapper[4911]: I0929 21:51:40.817092 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" podStartSLOduration=2.300242867 podStartE2EDuration="2.817066174s" podCreationTimestamp="2025-09-29 21:51:38 +0000 UTC" firstStartedPulling="2025-09-29 21:51:39.756049297 +0000 UTC m=+1577.733161958" lastFinishedPulling="2025-09-29 21:51:40.272872584 +0000 UTC m=+1578.249985265" observedRunningTime="2025-09-29 21:51:40.810061285 +0000 UTC m=+1578.787173976" watchObservedRunningTime="2025-09-29 21:51:40.817066174 +0000 UTC m=+1578.794178845" Sep 29 21:51:41 crc kubenswrapper[4911]: I0929 21:51:41.701516 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:51:41 crc kubenswrapper[4911]: E0929 21:51:41.702108 4911 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:51:51 crc kubenswrapper[4911]: I0929 21:51:51.036315 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-17b1-account-create-qkj5h"] Sep 29 21:51:51 crc kubenswrapper[4911]: I0929 21:51:51.046857 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-9232-account-create-skzbz"] Sep 29 21:51:51 crc kubenswrapper[4911]: I0929 21:51:51.056142 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-9232-account-create-skzbz"] Sep 29 21:51:51 crc kubenswrapper[4911]: I0929 21:51:51.063733 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-17b1-account-create-qkj5h"] Sep 29 21:51:51 crc kubenswrapper[4911]: I0929 21:51:51.070340 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-f317-account-create-mfjgf"] Sep 29 21:51:51 crc kubenswrapper[4911]: I0929 21:51:51.079848 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-f317-account-create-mfjgf"] Sep 29 21:51:52 crc kubenswrapper[4911]: I0929 21:51:52.716701 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2" path="/var/lib/kubelet/pods/33ce950e-7ceb-4ba0-b2d1-a9257f4cfcc2/volumes" Sep 29 21:51:52 crc kubenswrapper[4911]: I0929 21:51:52.717503 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e462c34-9385-46b1-9707-87944ea13535" path="/var/lib/kubelet/pods/4e462c34-9385-46b1-9707-87944ea13535/volumes" Sep 29 21:51:52 crc kubenswrapper[4911]: I0929 21:51:52.717970 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf8be712-8bd6-403d-b709-015e4be795d8" path="/var/lib/kubelet/pods/bf8be712-8bd6-403d-b709-015e4be795d8/volumes" Sep 29 21:51:54 crc kubenswrapper[4911]: I0929 21:51:54.702935 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:51:54 crc kubenswrapper[4911]: E0929 21:51:54.703701 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:51:55 crc kubenswrapper[4911]: I0929 21:51:55.039133 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-vkxxs"] Sep 29 21:51:55 crc kubenswrapper[4911]: I0929 21:51:55.047321 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-vkxxs"] Sep 29 21:51:56 crc kubenswrapper[4911]: I0929 21:51:56.038432 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-xpfbw"] Sep 29 21:51:56 crc kubenswrapper[4911]: I0929 21:51:56.050639 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-xpfbw"] Sep 29 21:51:56 crc kubenswrapper[4911]: I0929 21:51:56.710684 4911 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="373766c8-b8c6-4f57-b43b-24667ddb9564" path="/var/lib/kubelet/pods/373766c8-b8c6-4f57-b43b-24667ddb9564/volumes" Sep 29 21:51:56 crc kubenswrapper[4911]: I0929 21:51:56.711596 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8099e715-8ed5-40c3-9e20-1a2e873b867a" path="/var/lib/kubelet/pods/8099e715-8ed5-40c3-9e20-1a2e873b867a/volumes" Sep 29 21:52:06 crc kubenswrapper[4911]: I0929 21:52:06.701292 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:52:06 crc kubenswrapper[4911]: E0929 21:52:06.702380 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:52:16 crc kubenswrapper[4911]: I0929 21:52:16.051736 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-nt89p"] Sep 29 21:52:16 crc kubenswrapper[4911]: I0929 21:52:16.067818 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-nt89p"] Sep 29 21:52:16 crc kubenswrapper[4911]: I0929 21:52:16.713408 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84edca47-0d64-4494-b7a9-84712fb6515b" path="/var/lib/kubelet/pods/84edca47-0d64-4494-b7a9-84712fb6515b/volumes" Sep 29 21:52:21 crc kubenswrapper[4911]: I0929 21:52:21.701177 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:52:21 crc kubenswrapper[4911]: E0929 21:52:21.702491 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:52:26 crc kubenswrapper[4911]: I0929 21:52:26.065187 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-rm86n"] Sep 29 21:52:26 crc kubenswrapper[4911]: I0929 21:52:26.080742 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-rm86n"] Sep 29 21:52:26 crc kubenswrapper[4911]: I0929 21:52:26.721388 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67b23e82-069b-48d1-b154-5280fea52947" path="/var/lib/kubelet/pods/67b23e82-069b-48d1-b154-5280fea52947/volumes" Sep 29 21:52:36 crc kubenswrapper[4911]: I0929 21:52:36.576698 4911 scope.go:117] "RemoveContainer" containerID="1112a4f45be50979d9b61d096a4a4426e4f656e845ea656ff3e02bff8e2818c3" Sep 29 21:52:36 crc kubenswrapper[4911]: I0929 21:52:36.618270 4911 scope.go:117] "RemoveContainer" containerID="f0eea6d3731afd22996d55a08ad6021c3370b28d76bf48922883a11065b6e603" Sep 29 21:52:36 crc kubenswrapper[4911]: I0929 21:52:36.689081 4911 scope.go:117] "RemoveContainer" containerID="6ef83c83bb9f3e1ac135fccde8835799fb03907f0a39ceaacdd8b5b817560052" Sep 29 21:52:36 crc kubenswrapper[4911]: I0929 21:52:36.701978 4911 scope.go:117] "RemoveContainer" 
containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:52:36 crc kubenswrapper[4911]: E0929 21:52:36.702425 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:52:36 crc kubenswrapper[4911]: I0929 21:52:36.801140 4911 scope.go:117] "RemoveContainer" containerID="9c345e5b5cb1fe927e5550e3460d26da1aee54bbf084b90aaf78c7d478d45088" Sep 29 21:52:36 crc kubenswrapper[4911]: I0929 21:52:36.835481 4911 scope.go:117] "RemoveContainer" containerID="134d7d482dcdb7c997c96a6bd81e94c04338e5dd41ccc582689b9d3a127edb17" Sep 29 21:52:36 crc kubenswrapper[4911]: I0929 21:52:36.893033 4911 scope.go:117] "RemoveContainer" containerID="27a1ee14f12ddf347840cab9d0102756695589be15e84b6cb72e2e72aa8b748e" Sep 29 21:52:36 crc kubenswrapper[4911]: I0929 21:52:36.934336 4911 scope.go:117] "RemoveContainer" containerID="7c70bc3366af620d994b0869bda26a39f3a4f027e7aab1efb4bbfeaeadb8f99b" Sep 29 21:52:36 crc kubenswrapper[4911]: I0929 21:52:36.983764 4911 scope.go:117] "RemoveContainer" containerID="288c6d420e2975a9b261dcf8c4d8f3f77ab2873dee67bbf7f5c807c9dd6be63c" Sep 29 21:52:37 crc kubenswrapper[4911]: I0929 21:52:37.034310 4911 scope.go:117] "RemoveContainer" containerID="9005c2a1ba04eeb6a6c36c68be084652f844eebc07be3aac6ad5f0dab442de10" Sep 29 21:52:42 crc kubenswrapper[4911]: I0929 21:52:42.052359 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-m4r9b"] Sep 29 21:52:42 crc kubenswrapper[4911]: I0929 21:52:42.060081 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-m4r9b"] Sep 29 21:52:42 crc kubenswrapper[4911]: I0929 21:52:42.736931 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b39a884-4bfd-4927-af16-6ce025d131fc" path="/var/lib/kubelet/pods/3b39a884-4bfd-4927-af16-6ce025d131fc/volumes" Sep 29 21:52:47 crc kubenswrapper[4911]: I0929 21:52:47.701311 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:52:47 crc kubenswrapper[4911]: E0929 21:52:47.702269 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:52:54 crc kubenswrapper[4911]: I0929 21:52:54.044384 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-jg9nq"] Sep 29 21:52:54 crc kubenswrapper[4911]: I0929 21:52:54.053742 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-jg9nq"] Sep 29 21:52:54 crc kubenswrapper[4911]: I0929 21:52:54.061625 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-g8vhl"] Sep 29 21:52:54 crc kubenswrapper[4911]: I0929 21:52:54.067940 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-g8vhl"] Sep 29 21:52:54 crc 
kubenswrapper[4911]: I0929 21:52:54.073947 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-gc5vr"] Sep 29 21:52:54 crc kubenswrapper[4911]: I0929 21:52:54.079769 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-gc5vr"] Sep 29 21:52:54 crc kubenswrapper[4911]: I0929 21:52:54.719770 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13437aeb-5dc5-428b-ba77-1265e2f44468" path="/var/lib/kubelet/pods/13437aeb-5dc5-428b-ba77-1265e2f44468/volumes" Sep 29 21:52:54 crc kubenswrapper[4911]: I0929 21:52:54.721141 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74649df7-81b3-46eb-bd14-0ab5a40d1634" path="/var/lib/kubelet/pods/74649df7-81b3-46eb-bd14-0ab5a40d1634/volumes" Sep 29 21:52:54 crc kubenswrapper[4911]: I0929 21:52:54.722261 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="847ed503-c9fa-4f0b-ac71-6f990bdf2fac" path="/var/lib/kubelet/pods/847ed503-c9fa-4f0b-ac71-6f990bdf2fac/volumes" Sep 29 21:52:56 crc kubenswrapper[4911]: I0929 21:52:56.707193 4911 generic.go:334] "Generic (PLEG): container finished" podID="e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4" containerID="22072e4f32a147cdc34e3ac48e55696cf3523cefc374d4c8873d35e90adb7d5a" exitCode=0 Sep 29 21:52:56 crc kubenswrapper[4911]: I0929 21:52:56.720775 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" event={"ID":"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4","Type":"ContainerDied","Data":"22072e4f32a147cdc34e3ac48e55696cf3523cefc374d4c8873d35e90adb7d5a"} Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.126290 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.281168 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-ssh-key\") pod \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\" (UID: \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\") " Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.281224 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glzfr\" (UniqueName: \"kubernetes.io/projected/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-kube-api-access-glzfr\") pod \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\" (UID: \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\") " Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.281430 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-inventory\") pod \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\" (UID: \"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4\") " Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.286804 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-kube-api-access-glzfr" (OuterVolumeSpecName: "kube-api-access-glzfr") pod "e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4" (UID: "e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4"). InnerVolumeSpecName "kube-api-access-glzfr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.307105 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4" (UID: "e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.314878 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-inventory" (OuterVolumeSpecName: "inventory") pod "e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4" (UID: "e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.383605 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.383635 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.383645 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glzfr\" (UniqueName: \"kubernetes.io/projected/e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4-kube-api-access-glzfr\") on node \"crc\" DevicePath \"\"" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.701513 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:52:58 crc kubenswrapper[4911]: E0929 21:52:58.702078 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.735331 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" event={"ID":"e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4","Type":"ContainerDied","Data":"71ea6b33b71b84b9c440e03ed564056b884cc962e8bedcd8e964693a7638e02c"} Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.735692 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71ea6b33b71b84b9c440e03ed564056b884cc962e8bedcd8e964693a7638e02c" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.735887 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.840482 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx"] Sep 29 21:52:58 crc kubenswrapper[4911]: E0929 21:52:58.841186 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.841204 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.841407 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.842003 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.844109 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.844124 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.844505 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.847607 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.855395 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx"] Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.997154 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf7p6\" (UniqueName: \"kubernetes.io/projected/697342c7-feea-4250-90f3-adca6bcada86-kube-api-access-zf7p6\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx\" (UID: \"697342c7-feea-4250-90f3-adca6bcada86\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.997784 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/697342c7-feea-4250-90f3-adca6bcada86-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx\" (UID: \"697342c7-feea-4250-90f3-adca6bcada86\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:52:58 crc kubenswrapper[4911]: I0929 21:52:58.997938 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/697342c7-feea-4250-90f3-adca6bcada86-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx\" (UID: \"697342c7-feea-4250-90f3-adca6bcada86\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:52:59 crc 
kubenswrapper[4911]: I0929 21:52:59.099850 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/697342c7-feea-4250-90f3-adca6bcada86-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx\" (UID: \"697342c7-feea-4250-90f3-adca6bcada86\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:52:59 crc kubenswrapper[4911]: I0929 21:52:59.099896 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/697342c7-feea-4250-90f3-adca6bcada86-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx\" (UID: \"697342c7-feea-4250-90f3-adca6bcada86\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:52:59 crc kubenswrapper[4911]: I0929 21:52:59.099935 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf7p6\" (UniqueName: \"kubernetes.io/projected/697342c7-feea-4250-90f3-adca6bcada86-kube-api-access-zf7p6\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx\" (UID: \"697342c7-feea-4250-90f3-adca6bcada86\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:52:59 crc kubenswrapper[4911]: I0929 21:52:59.108730 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/697342c7-feea-4250-90f3-adca6bcada86-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx\" (UID: \"697342c7-feea-4250-90f3-adca6bcada86\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:52:59 crc kubenswrapper[4911]: I0929 21:52:59.108939 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/697342c7-feea-4250-90f3-adca6bcada86-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx\" (UID: \"697342c7-feea-4250-90f3-adca6bcada86\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:52:59 crc kubenswrapper[4911]: I0929 21:52:59.117106 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf7p6\" (UniqueName: \"kubernetes.io/projected/697342c7-feea-4250-90f3-adca6bcada86-kube-api-access-zf7p6\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx\" (UID: \"697342c7-feea-4250-90f3-adca6bcada86\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:52:59 crc kubenswrapper[4911]: I0929 21:52:59.160520 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:52:59 crc kubenswrapper[4911]: I0929 21:52:59.683178 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx"] Sep 29 21:52:59 crc kubenswrapper[4911]: I0929 21:52:59.745750 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" event={"ID":"697342c7-feea-4250-90f3-adca6bcada86","Type":"ContainerStarted","Data":"0f4d3b3d14331b0ca2bbab54d76efbeb810a51cea8262f78772e3121310f0f32"} Sep 29 21:53:00 crc kubenswrapper[4911]: I0929 21:53:00.759178 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" event={"ID":"697342c7-feea-4250-90f3-adca6bcada86","Type":"ContainerStarted","Data":"267e7ddb25e418191a49bc2684955f379488e1724c04139a04e2f6c3b84220c0"} Sep 29 21:53:00 crc kubenswrapper[4911]: I0929 21:53:00.772251 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" podStartSLOduration=2.248367058 podStartE2EDuration="2.772235784s" podCreationTimestamp="2025-09-29 21:52:58 +0000 UTC" firstStartedPulling="2025-09-29 21:52:59.692229255 +0000 UTC m=+1657.669341926" lastFinishedPulling="2025-09-29 21:53:00.216097941 +0000 UTC m=+1658.193210652" observedRunningTime="2025-09-29 21:53:00.771992407 +0000 UTC m=+1658.749105118" watchObservedRunningTime="2025-09-29 21:53:00.772235784 +0000 UTC m=+1658.749348455" Sep 29 21:53:04 crc kubenswrapper[4911]: I0929 21:53:04.036776 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-aecd-account-create-mpdgf"] Sep 29 21:53:04 crc kubenswrapper[4911]: I0929 21:53:04.047946 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-f4e5-account-create-4v4cn"] Sep 29 21:53:04 crc kubenswrapper[4911]: I0929 21:53:04.065146 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-7380-account-create-cr8m4"] Sep 29 21:53:04 crc kubenswrapper[4911]: I0929 21:53:04.081055 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-7380-account-create-cr8m4"] Sep 29 21:53:04 crc kubenswrapper[4911]: I0929 21:53:04.091927 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-aecd-account-create-mpdgf"] Sep 29 21:53:04 crc kubenswrapper[4911]: I0929 21:53:04.099387 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-f4e5-account-create-4v4cn"] Sep 29 21:53:04 crc kubenswrapper[4911]: I0929 21:53:04.720445 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="754ff701-048b-42ea-a812-f54def8ad721" path="/var/lib/kubelet/pods/754ff701-048b-42ea-a812-f54def8ad721/volumes" Sep 29 21:53:04 crc kubenswrapper[4911]: I0929 21:53:04.721324 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea773222-bf22-4cb1-b26d-0ec95c9ef332" path="/var/lib/kubelet/pods/ea773222-bf22-4cb1-b26d-0ec95c9ef332/volumes" Sep 29 21:53:04 crc kubenswrapper[4911]: I0929 21:53:04.722164 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f943f20f-f0a2-4eb9-96b4-7bffe152853d" path="/var/lib/kubelet/pods/f943f20f-f0a2-4eb9-96b4-7bffe152853d/volumes" Sep 29 21:53:05 crc kubenswrapper[4911]: I0929 21:53:05.829371 4911 generic.go:334] "Generic (PLEG): container finished" 
podID="697342c7-feea-4250-90f3-adca6bcada86" containerID="267e7ddb25e418191a49bc2684955f379488e1724c04139a04e2f6c3b84220c0" exitCode=0 Sep 29 21:53:05 crc kubenswrapper[4911]: I0929 21:53:05.829454 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" event={"ID":"697342c7-feea-4250-90f3-adca6bcada86","Type":"ContainerDied","Data":"267e7ddb25e418191a49bc2684955f379488e1724c04139a04e2f6c3b84220c0"} Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.333141 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.485122 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/697342c7-feea-4250-90f3-adca6bcada86-ssh-key\") pod \"697342c7-feea-4250-90f3-adca6bcada86\" (UID: \"697342c7-feea-4250-90f3-adca6bcada86\") " Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.485246 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf7p6\" (UniqueName: \"kubernetes.io/projected/697342c7-feea-4250-90f3-adca6bcada86-kube-api-access-zf7p6\") pod \"697342c7-feea-4250-90f3-adca6bcada86\" (UID: \"697342c7-feea-4250-90f3-adca6bcada86\") " Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.485377 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/697342c7-feea-4250-90f3-adca6bcada86-inventory\") pod \"697342c7-feea-4250-90f3-adca6bcada86\" (UID: \"697342c7-feea-4250-90f3-adca6bcada86\") " Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.493919 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/697342c7-feea-4250-90f3-adca6bcada86-kube-api-access-zf7p6" (OuterVolumeSpecName: "kube-api-access-zf7p6") pod "697342c7-feea-4250-90f3-adca6bcada86" (UID: "697342c7-feea-4250-90f3-adca6bcada86"). InnerVolumeSpecName "kube-api-access-zf7p6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.547558 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/697342c7-feea-4250-90f3-adca6bcada86-inventory" (OuterVolumeSpecName: "inventory") pod "697342c7-feea-4250-90f3-adca6bcada86" (UID: "697342c7-feea-4250-90f3-adca6bcada86"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.549107 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/697342c7-feea-4250-90f3-adca6bcada86-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "697342c7-feea-4250-90f3-adca6bcada86" (UID: "697342c7-feea-4250-90f3-adca6bcada86"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.588195 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/697342c7-feea-4250-90f3-adca6bcada86-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.588233 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/697342c7-feea-4250-90f3-adca6bcada86-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.588247 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf7p6\" (UniqueName: \"kubernetes.io/projected/697342c7-feea-4250-90f3-adca6bcada86-kube-api-access-zf7p6\") on node \"crc\" DevicePath \"\"" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.851340 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" event={"ID":"697342c7-feea-4250-90f3-adca6bcada86","Type":"ContainerDied","Data":"0f4d3b3d14331b0ca2bbab54d76efbeb810a51cea8262f78772e3121310f0f32"} Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.851396 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f4d3b3d14331b0ca2bbab54d76efbeb810a51cea8262f78772e3121310f0f32" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.851450 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.976407 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4"] Sep 29 21:53:07 crc kubenswrapper[4911]: E0929 21:53:07.976843 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="697342c7-feea-4250-90f3-adca6bcada86" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.976862 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="697342c7-feea-4250-90f3-adca6bcada86" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.977082 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="697342c7-feea-4250-90f3-adca6bcada86" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.977773 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.979465 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.980033 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.982016 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.983097 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:53:07 crc kubenswrapper[4911]: I0929 21:53:07.993565 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4"] Sep 29 21:53:08 crc kubenswrapper[4911]: I0929 21:53:08.099076 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78314a84-b641-4e3e-9aff-f4c9dd5553fe-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-cp7l4\" (UID: \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:08 crc kubenswrapper[4911]: I0929 21:53:08.099163 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78314a84-b641-4e3e-9aff-f4c9dd5553fe-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-cp7l4\" (UID: \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:08 crc kubenswrapper[4911]: I0929 21:53:08.099489 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n997m\" (UniqueName: \"kubernetes.io/projected/78314a84-b641-4e3e-9aff-f4c9dd5553fe-kube-api-access-n997m\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-cp7l4\" (UID: \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:08 crc kubenswrapper[4911]: I0929 21:53:08.202011 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n997m\" (UniqueName: \"kubernetes.io/projected/78314a84-b641-4e3e-9aff-f4c9dd5553fe-kube-api-access-n997m\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-cp7l4\" (UID: \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:08 crc kubenswrapper[4911]: I0929 21:53:08.202178 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78314a84-b641-4e3e-9aff-f4c9dd5553fe-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-cp7l4\" (UID: \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:08 crc kubenswrapper[4911]: I0929 21:53:08.202215 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78314a84-b641-4e3e-9aff-f4c9dd5553fe-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-cp7l4\" (UID: 
\"78314a84-b641-4e3e-9aff-f4c9dd5553fe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:08 crc kubenswrapper[4911]: I0929 21:53:08.207937 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78314a84-b641-4e3e-9aff-f4c9dd5553fe-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-cp7l4\" (UID: \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:08 crc kubenswrapper[4911]: I0929 21:53:08.208146 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78314a84-b641-4e3e-9aff-f4c9dd5553fe-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-cp7l4\" (UID: \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:08 crc kubenswrapper[4911]: I0929 21:53:08.224669 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n997m\" (UniqueName: \"kubernetes.io/projected/78314a84-b641-4e3e-9aff-f4c9dd5553fe-kube-api-access-n997m\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-cp7l4\" (UID: \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:08 crc kubenswrapper[4911]: I0929 21:53:08.305693 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:08 crc kubenswrapper[4911]: I0929 21:53:08.866265 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4"] Sep 29 21:53:08 crc kubenswrapper[4911]: W0929 21:53:08.871675 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78314a84_b641_4e3e_9aff_f4c9dd5553fe.slice/crio-0c1a2f60546e6ca4d0d57b4b6811402b5385e1d58228d791fe169e397736fea9 WatchSource:0}: Error finding container 0c1a2f60546e6ca4d0d57b4b6811402b5385e1d58228d791fe169e397736fea9: Status 404 returned error can't find the container with id 0c1a2f60546e6ca4d0d57b4b6811402b5385e1d58228d791fe169e397736fea9 Sep 29 21:53:09 crc kubenswrapper[4911]: I0929 21:53:09.875120 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" event={"ID":"78314a84-b641-4e3e-9aff-f4c9dd5553fe","Type":"ContainerStarted","Data":"83e9dd1c068e27bac42a92b02adfd594005a3b1be23db86bac8cf0bd8edf20c9"} Sep 29 21:53:09 crc kubenswrapper[4911]: I0929 21:53:09.875554 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" event={"ID":"78314a84-b641-4e3e-9aff-f4c9dd5553fe","Type":"ContainerStarted","Data":"0c1a2f60546e6ca4d0d57b4b6811402b5385e1d58228d791fe169e397736fea9"} Sep 29 21:53:09 crc kubenswrapper[4911]: I0929 21:53:09.901681 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" podStartSLOduration=2.520203568 podStartE2EDuration="2.901659854s" podCreationTimestamp="2025-09-29 21:53:07 +0000 UTC" firstStartedPulling="2025-09-29 21:53:08.873703468 +0000 UTC m=+1666.850816139" lastFinishedPulling="2025-09-29 21:53:09.255159714 +0000 UTC m=+1667.232272425" observedRunningTime="2025-09-29 21:53:09.89477989 +0000 UTC 
m=+1667.871892591" watchObservedRunningTime="2025-09-29 21:53:09.901659854 +0000 UTC m=+1667.878772555" Sep 29 21:53:11 crc kubenswrapper[4911]: I0929 21:53:11.702991 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:53:11 crc kubenswrapper[4911]: E0929 21:53:11.703850 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:53:26 crc kubenswrapper[4911]: I0929 21:53:26.055930 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jvp47"] Sep 29 21:53:26 crc kubenswrapper[4911]: I0929 21:53:26.066774 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jvp47"] Sep 29 21:53:26 crc kubenswrapper[4911]: I0929 21:53:26.701562 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:53:26 crc kubenswrapper[4911]: E0929 21:53:26.702028 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:53:26 crc kubenswrapper[4911]: I0929 21:53:26.721566 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a020ecb-1b48-4428-a32f-8593034ab88b" path="/var/lib/kubelet/pods/9a020ecb-1b48-4428-a32f-8593034ab88b/volumes" Sep 29 21:53:37 crc kubenswrapper[4911]: I0929 21:53:37.279329 4911 scope.go:117] "RemoveContainer" containerID="808ad20d12061e1c2b0ba909bc0fd0424a6636624ba4772feeab19621ae307e1" Sep 29 21:53:37 crc kubenswrapper[4911]: I0929 21:53:37.310902 4911 scope.go:117] "RemoveContainer" containerID="7684130f2b8f0aeb2fee8e849b221b5170e0407f57279e8b6356fe5d175e4eb0" Sep 29 21:53:37 crc kubenswrapper[4911]: I0929 21:53:37.391567 4911 scope.go:117] "RemoveContainer" containerID="4e552f7c86495e355ec94799db5ab2105eb24ef97acbf33c9055deb002800d6c" Sep 29 21:53:37 crc kubenswrapper[4911]: I0929 21:53:37.427090 4911 scope.go:117] "RemoveContainer" containerID="64d76e2842031af1c9210cb8228a1e805574730be2e219ab06de0ff0343b246d" Sep 29 21:53:37 crc kubenswrapper[4911]: I0929 21:53:37.502908 4911 scope.go:117] "RemoveContainer" containerID="f43a8fac91d8f04fe6616c91fd478bef02733c439ff2910c77d32fbbdd630e32" Sep 29 21:53:37 crc kubenswrapper[4911]: I0929 21:53:37.529950 4911 scope.go:117] "RemoveContainer" containerID="5cd972f82ce850ec1fb9160011aac74fd03f585fa9d927cd89c4be3f1f9f6f7f" Sep 29 21:53:37 crc kubenswrapper[4911]: I0929 21:53:37.563829 4911 scope.go:117] "RemoveContainer" containerID="7bec0c2668962cc66c93bd3c318f7194f37988be3f9628ffe93b22e45c485ab3" Sep 29 21:53:37 crc kubenswrapper[4911]: I0929 21:53:37.589716 4911 scope.go:117] "RemoveContainer" containerID="57d09cbec1d9733080d78c392e2828679789417b7981b6e0150ed1afa12353c0" Sep 29 21:53:37 crc kubenswrapper[4911]: I0929 21:53:37.701674 4911 scope.go:117] 
"RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:53:37 crc kubenswrapper[4911]: E0929 21:53:37.702164 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:53:49 crc kubenswrapper[4911]: I0929 21:53:49.069460 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-49f9z"] Sep 29 21:53:49 crc kubenswrapper[4911]: I0929 21:53:49.086218 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-49f9z"] Sep 29 21:53:50 crc kubenswrapper[4911]: I0929 21:53:50.035738 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xpzwc"] Sep 29 21:53:50 crc kubenswrapper[4911]: I0929 21:53:50.046943 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xpzwc"] Sep 29 21:53:50 crc kubenswrapper[4911]: I0929 21:53:50.713709 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c8f9423-562b-49fa-946d-10d52101e44c" path="/var/lib/kubelet/pods/7c8f9423-562b-49fa-946d-10d52101e44c/volumes" Sep 29 21:53:50 crc kubenswrapper[4911]: I0929 21:53:50.714762 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="823859df-f000-4749-acfe-eb9168574272" path="/var/lib/kubelet/pods/823859df-f000-4749-acfe-eb9168574272/volumes" Sep 29 21:53:51 crc kubenswrapper[4911]: I0929 21:53:51.332472 4911 generic.go:334] "Generic (PLEG): container finished" podID="78314a84-b641-4e3e-9aff-f4c9dd5553fe" containerID="83e9dd1c068e27bac42a92b02adfd594005a3b1be23db86bac8cf0bd8edf20c9" exitCode=0 Sep 29 21:53:51 crc kubenswrapper[4911]: I0929 21:53:51.332536 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" event={"ID":"78314a84-b641-4e3e-9aff-f4c9dd5553fe","Type":"ContainerDied","Data":"83e9dd1c068e27bac42a92b02adfd594005a3b1be23db86bac8cf0bd8edf20c9"} Sep 29 21:53:51 crc kubenswrapper[4911]: I0929 21:53:51.701359 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:53:51 crc kubenswrapper[4911]: E0929 21:53:51.701605 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:53:52 crc kubenswrapper[4911]: I0929 21:53:52.873349 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:52 crc kubenswrapper[4911]: I0929 21:53:52.975418 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n997m\" (UniqueName: \"kubernetes.io/projected/78314a84-b641-4e3e-9aff-f4c9dd5553fe-kube-api-access-n997m\") pod \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\" (UID: \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\") " Sep 29 21:53:52 crc kubenswrapper[4911]: I0929 21:53:52.975629 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78314a84-b641-4e3e-9aff-f4c9dd5553fe-inventory\") pod \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\" (UID: \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\") " Sep 29 21:53:52 crc kubenswrapper[4911]: I0929 21:53:52.975822 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78314a84-b641-4e3e-9aff-f4c9dd5553fe-ssh-key\") pod \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\" (UID: \"78314a84-b641-4e3e-9aff-f4c9dd5553fe\") " Sep 29 21:53:52 crc kubenswrapper[4911]: I0929 21:53:52.981843 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78314a84-b641-4e3e-9aff-f4c9dd5553fe-kube-api-access-n997m" (OuterVolumeSpecName: "kube-api-access-n997m") pod "78314a84-b641-4e3e-9aff-f4c9dd5553fe" (UID: "78314a84-b641-4e3e-9aff-f4c9dd5553fe"). InnerVolumeSpecName "kube-api-access-n997m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.012295 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78314a84-b641-4e3e-9aff-f4c9dd5553fe-inventory" (OuterVolumeSpecName: "inventory") pod "78314a84-b641-4e3e-9aff-f4c9dd5553fe" (UID: "78314a84-b641-4e3e-9aff-f4c9dd5553fe"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.025054 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78314a84-b641-4e3e-9aff-f4c9dd5553fe-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "78314a84-b641-4e3e-9aff-f4c9dd5553fe" (UID: "78314a84-b641-4e3e-9aff-f4c9dd5553fe"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.078402 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78314a84-b641-4e3e-9aff-f4c9dd5553fe-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.078432 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78314a84-b641-4e3e-9aff-f4c9dd5553fe-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.078442 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n997m\" (UniqueName: \"kubernetes.io/projected/78314a84-b641-4e3e-9aff-f4c9dd5553fe-kube-api-access-n997m\") on node \"crc\" DevicePath \"\"" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.359463 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" event={"ID":"78314a84-b641-4e3e-9aff-f4c9dd5553fe","Type":"ContainerDied","Data":"0c1a2f60546e6ca4d0d57b4b6811402b5385e1d58228d791fe169e397736fea9"} Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.359519 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c1a2f60546e6ca4d0d57b4b6811402b5385e1d58228d791fe169e397736fea9" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.359568 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-cp7l4" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.458888 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz"] Sep 29 21:53:53 crc kubenswrapper[4911]: E0929 21:53:53.459372 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78314a84-b641-4e3e-9aff-f4c9dd5553fe" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.459394 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="78314a84-b641-4e3e-9aff-f4c9dd5553fe" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.459625 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="78314a84-b641-4e3e-9aff-f4c9dd5553fe" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.460401 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.463742 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.463894 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.463955 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.464126 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.483306 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz"] Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.586807 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e3e5ed96-035f-4645-a6b6-f92c01981ad4-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz\" (UID: \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.586917 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66fqs\" (UniqueName: \"kubernetes.io/projected/e3e5ed96-035f-4645-a6b6-f92c01981ad4-kube-api-access-66fqs\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz\" (UID: \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.586947 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e3e5ed96-035f-4645-a6b6-f92c01981ad4-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz\" (UID: \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.688173 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66fqs\" (UniqueName: \"kubernetes.io/projected/e3e5ed96-035f-4645-a6b6-f92c01981ad4-kube-api-access-66fqs\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz\" (UID: \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.688224 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e3e5ed96-035f-4645-a6b6-f92c01981ad4-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz\" (UID: \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.688316 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e3e5ed96-035f-4645-a6b6-f92c01981ad4-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz\" 
(UID: \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.692808 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e3e5ed96-035f-4645-a6b6-f92c01981ad4-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz\" (UID: \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.693730 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e3e5ed96-035f-4645-a6b6-f92c01981ad4-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz\" (UID: \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.704900 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66fqs\" (UniqueName: \"kubernetes.io/projected/e3e5ed96-035f-4645-a6b6-f92c01981ad4-kube-api-access-66fqs\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz\" (UID: \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:53:53 crc kubenswrapper[4911]: I0929 21:53:53.793691 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:53:54 crc kubenswrapper[4911]: I0929 21:53:54.369855 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz"] Sep 29 21:53:55 crc kubenswrapper[4911]: I0929 21:53:55.381762 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" event={"ID":"e3e5ed96-035f-4645-a6b6-f92c01981ad4","Type":"ContainerStarted","Data":"a06fbde2f72466a10713a3803797c11c444f4923c7de4bcdb8aff0de59c7492a"} Sep 29 21:53:55 crc kubenswrapper[4911]: I0929 21:53:55.382152 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" event={"ID":"e3e5ed96-035f-4645-a6b6-f92c01981ad4","Type":"ContainerStarted","Data":"571b07b7922e4866e02c7b4499fe6bfa26d3a1e6503d9e6f43ce731cdf95a2a0"} Sep 29 21:53:55 crc kubenswrapper[4911]: I0929 21:53:55.406716 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" podStartSLOduration=1.944731669 podStartE2EDuration="2.406699545s" podCreationTimestamp="2025-09-29 21:53:53 +0000 UTC" firstStartedPulling="2025-09-29 21:53:54.37845675 +0000 UTC m=+1712.355569411" lastFinishedPulling="2025-09-29 21:53:54.840424576 +0000 UTC m=+1712.817537287" observedRunningTime="2025-09-29 21:53:55.405970532 +0000 UTC m=+1713.383083253" watchObservedRunningTime="2025-09-29 21:53:55.406699545 +0000 UTC m=+1713.383812226" Sep 29 21:54:02 crc kubenswrapper[4911]: I0929 21:54:02.705710 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:54:02 crc kubenswrapper[4911]: E0929 21:54:02.706337 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:54:17 crc kubenswrapper[4911]: I0929 21:54:17.701546 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:54:17 crc kubenswrapper[4911]: E0929 21:54:17.703534 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:54:30 crc kubenswrapper[4911]: I0929 21:54:30.702289 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:54:30 crc kubenswrapper[4911]: E0929 21:54:30.703581 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:54:34 crc kubenswrapper[4911]: I0929 21:54:34.051197 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-xfpnw"] Sep 29 21:54:34 crc kubenswrapper[4911]: I0929 21:54:34.060200 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-xfpnw"] Sep 29 21:54:34 crc kubenswrapper[4911]: I0929 21:54:34.710130 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8feb84b9-2d26-49c2-b4cd-6504c42773e6" path="/var/lib/kubelet/pods/8feb84b9-2d26-49c2-b4cd-6504c42773e6/volumes" Sep 29 21:54:37 crc kubenswrapper[4911]: I0929 21:54:37.762105 4911 scope.go:117] "RemoveContainer" containerID="8f60832b0ac2ad7edac27180158f9865b7ee0aa997d04e16793bce56d5f08973" Sep 29 21:54:37 crc kubenswrapper[4911]: I0929 21:54:37.838039 4911 scope.go:117] "RemoveContainer" containerID="842ee730a19ce701299917a4cd6d7897718662ab5879e435a4ae7295af1993db" Sep 29 21:54:37 crc kubenswrapper[4911]: I0929 21:54:37.884048 4911 scope.go:117] "RemoveContainer" containerID="755ed86710fd07765b52da1f63606bf3a3b4b7887b5407d7caa4a7a92f3fe11f" Sep 29 21:54:42 crc kubenswrapper[4911]: I0929 21:54:42.708762 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:54:42 crc kubenswrapper[4911]: E0929 21:54:42.709500 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 21:54:53 crc kubenswrapper[4911]: I0929 21:54:53.994223 4911 generic.go:334] "Generic (PLEG): container finished" 
podID="e3e5ed96-035f-4645-a6b6-f92c01981ad4" containerID="a06fbde2f72466a10713a3803797c11c444f4923c7de4bcdb8aff0de59c7492a" exitCode=0 Sep 29 21:54:53 crc kubenswrapper[4911]: I0929 21:54:53.994290 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" event={"ID":"e3e5ed96-035f-4645-a6b6-f92c01981ad4","Type":"ContainerDied","Data":"a06fbde2f72466a10713a3803797c11c444f4923c7de4bcdb8aff0de59c7492a"} Sep 29 21:54:55 crc kubenswrapper[4911]: I0929 21:54:55.475102 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:54:55 crc kubenswrapper[4911]: I0929 21:54:55.663227 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e3e5ed96-035f-4645-a6b6-f92c01981ad4-ssh-key\") pod \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\" (UID: \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\") " Sep 29 21:54:55 crc kubenswrapper[4911]: I0929 21:54:55.663717 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66fqs\" (UniqueName: \"kubernetes.io/projected/e3e5ed96-035f-4645-a6b6-f92c01981ad4-kube-api-access-66fqs\") pod \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\" (UID: \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\") " Sep 29 21:54:55 crc kubenswrapper[4911]: I0929 21:54:55.663930 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e3e5ed96-035f-4645-a6b6-f92c01981ad4-inventory\") pod \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\" (UID: \"e3e5ed96-035f-4645-a6b6-f92c01981ad4\") " Sep 29 21:54:55 crc kubenswrapper[4911]: I0929 21:54:55.670913 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3e5ed96-035f-4645-a6b6-f92c01981ad4-kube-api-access-66fqs" (OuterVolumeSpecName: "kube-api-access-66fqs") pod "e3e5ed96-035f-4645-a6b6-f92c01981ad4" (UID: "e3e5ed96-035f-4645-a6b6-f92c01981ad4"). InnerVolumeSpecName "kube-api-access-66fqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:54:55 crc kubenswrapper[4911]: I0929 21:54:55.692093 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3e5ed96-035f-4645-a6b6-f92c01981ad4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e3e5ed96-035f-4645-a6b6-f92c01981ad4" (UID: "e3e5ed96-035f-4645-a6b6-f92c01981ad4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:54:55 crc kubenswrapper[4911]: I0929 21:54:55.718249 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3e5ed96-035f-4645-a6b6-f92c01981ad4-inventory" (OuterVolumeSpecName: "inventory") pod "e3e5ed96-035f-4645-a6b6-f92c01981ad4" (UID: "e3e5ed96-035f-4645-a6b6-f92c01981ad4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:54:55 crc kubenswrapper[4911]: I0929 21:54:55.767336 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e3e5ed96-035f-4645-a6b6-f92c01981ad4-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:54:55 crc kubenswrapper[4911]: I0929 21:54:55.767542 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e3e5ed96-035f-4645-a6b6-f92c01981ad4-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:54:55 crc kubenswrapper[4911]: I0929 21:54:55.767663 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66fqs\" (UniqueName: \"kubernetes.io/projected/e3e5ed96-035f-4645-a6b6-f92c01981ad4-kube-api-access-66fqs\") on node \"crc\" DevicePath \"\"" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.012336 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" event={"ID":"e3e5ed96-035f-4645-a6b6-f92c01981ad4","Type":"ContainerDied","Data":"571b07b7922e4866e02c7b4499fe6bfa26d3a1e6503d9e6f43ce731cdf95a2a0"} Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.012372 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="571b07b7922e4866e02c7b4499fe6bfa26d3a1e6503d9e6f43ce731cdf95a2a0" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.012394 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.101715 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-ntqrx"] Sep 29 21:54:56 crc kubenswrapper[4911]: E0929 21:54:56.102075 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3e5ed96-035f-4645-a6b6-f92c01981ad4" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.102093 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3e5ed96-035f-4645-a6b6-f92c01981ad4" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.102275 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3e5ed96-035f-4645-a6b6-f92c01981ad4" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.102929 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.104337 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.104656 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.105274 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.105405 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.118073 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-ntqrx"] Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.276019 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/871d427c-7323-4980-aa4f-d9c835dd2d91-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-ntqrx\" (UID: \"871d427c-7323-4980-aa4f-d9c835dd2d91\") " pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.276150 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2pwk\" (UniqueName: \"kubernetes.io/projected/871d427c-7323-4980-aa4f-d9c835dd2d91-kube-api-access-t2pwk\") pod \"ssh-known-hosts-edpm-deployment-ntqrx\" (UID: \"871d427c-7323-4980-aa4f-d9c835dd2d91\") " pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.276326 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/871d427c-7323-4980-aa4f-d9c835dd2d91-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-ntqrx\" (UID: \"871d427c-7323-4980-aa4f-d9c835dd2d91\") " pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.379058 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/871d427c-7323-4980-aa4f-d9c835dd2d91-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-ntqrx\" (UID: \"871d427c-7323-4980-aa4f-d9c835dd2d91\") " pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.379136 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2pwk\" (UniqueName: \"kubernetes.io/projected/871d427c-7323-4980-aa4f-d9c835dd2d91-kube-api-access-t2pwk\") pod \"ssh-known-hosts-edpm-deployment-ntqrx\" (UID: \"871d427c-7323-4980-aa4f-d9c835dd2d91\") " pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.379219 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/871d427c-7323-4980-aa4f-d9c835dd2d91-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-ntqrx\" (UID: \"871d427c-7323-4980-aa4f-d9c835dd2d91\") " pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:54:56 crc 
kubenswrapper[4911]: I0929 21:54:56.388512 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/871d427c-7323-4980-aa4f-d9c835dd2d91-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-ntqrx\" (UID: \"871d427c-7323-4980-aa4f-d9c835dd2d91\") " pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.389362 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/871d427c-7323-4980-aa4f-d9c835dd2d91-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-ntqrx\" (UID: \"871d427c-7323-4980-aa4f-d9c835dd2d91\") " pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.411410 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2pwk\" (UniqueName: \"kubernetes.io/projected/871d427c-7323-4980-aa4f-d9c835dd2d91-kube-api-access-t2pwk\") pod \"ssh-known-hosts-edpm-deployment-ntqrx\" (UID: \"871d427c-7323-4980-aa4f-d9c835dd2d91\") " pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:54:56 crc kubenswrapper[4911]: I0929 21:54:56.419511 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:54:57 crc kubenswrapper[4911]: I0929 21:54:57.046368 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-ntqrx"] Sep 29 21:54:57 crc kubenswrapper[4911]: I0929 21:54:57.052428 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 21:54:57 crc kubenswrapper[4911]: I0929 21:54:57.702885 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:54:58 crc kubenswrapper[4911]: I0929 21:54:58.034911 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"cf3902371ff18d62ab98dcaca1c35a41b574028c73c6cba4dcb8f735f395f50a"} Sep 29 21:54:58 crc kubenswrapper[4911]: I0929 21:54:58.036680 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" event={"ID":"871d427c-7323-4980-aa4f-d9c835dd2d91","Type":"ContainerStarted","Data":"53a023a8cc36da49bc978e31bfb82a9900e30d3f8e7c9d9cd474d0024913227f"} Sep 29 21:54:58 crc kubenswrapper[4911]: I0929 21:54:58.036705 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" event={"ID":"871d427c-7323-4980-aa4f-d9c835dd2d91","Type":"ContainerStarted","Data":"3bf23ed2c61609527bea1f62bdefb8448acbe4fe82ce4a3b76ceb6b757a5c0ae"} Sep 29 21:54:58 crc kubenswrapper[4911]: I0929 21:54:58.085384 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" podStartSLOduration=1.6179059759999999 podStartE2EDuration="2.085357903s" podCreationTimestamp="2025-09-29 21:54:56 +0000 UTC" firstStartedPulling="2025-09-29 21:54:57.05203605 +0000 UTC m=+1775.029148741" lastFinishedPulling="2025-09-29 21:54:57.519487967 +0000 UTC m=+1775.496600668" observedRunningTime="2025-09-29 21:54:58.07115952 +0000 UTC m=+1776.048272191" watchObservedRunningTime="2025-09-29 21:54:58.085357903 +0000 UTC m=+1776.062470574" Sep 29 
21:55:06 crc kubenswrapper[4911]: I0929 21:55:06.120087 4911 generic.go:334] "Generic (PLEG): container finished" podID="871d427c-7323-4980-aa4f-d9c835dd2d91" containerID="53a023a8cc36da49bc978e31bfb82a9900e30d3f8e7c9d9cd474d0024913227f" exitCode=0 Sep 29 21:55:06 crc kubenswrapper[4911]: I0929 21:55:06.120210 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" event={"ID":"871d427c-7323-4980-aa4f-d9c835dd2d91","Type":"ContainerDied","Data":"53a023a8cc36da49bc978e31bfb82a9900e30d3f8e7c9d9cd474d0024913227f"} Sep 29 21:55:07 crc kubenswrapper[4911]: I0929 21:55:07.559716 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:55:07 crc kubenswrapper[4911]: I0929 21:55:07.706984 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/871d427c-7323-4980-aa4f-d9c835dd2d91-ssh-key-openstack-edpm-ipam\") pod \"871d427c-7323-4980-aa4f-d9c835dd2d91\" (UID: \"871d427c-7323-4980-aa4f-d9c835dd2d91\") " Sep 29 21:55:07 crc kubenswrapper[4911]: I0929 21:55:07.707176 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2pwk\" (UniqueName: \"kubernetes.io/projected/871d427c-7323-4980-aa4f-d9c835dd2d91-kube-api-access-t2pwk\") pod \"871d427c-7323-4980-aa4f-d9c835dd2d91\" (UID: \"871d427c-7323-4980-aa4f-d9c835dd2d91\") " Sep 29 21:55:07 crc kubenswrapper[4911]: I0929 21:55:07.707260 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/871d427c-7323-4980-aa4f-d9c835dd2d91-inventory-0\") pod \"871d427c-7323-4980-aa4f-d9c835dd2d91\" (UID: \"871d427c-7323-4980-aa4f-d9c835dd2d91\") " Sep 29 21:55:07 crc kubenswrapper[4911]: I0929 21:55:07.712658 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/871d427c-7323-4980-aa4f-d9c835dd2d91-kube-api-access-t2pwk" (OuterVolumeSpecName: "kube-api-access-t2pwk") pod "871d427c-7323-4980-aa4f-d9c835dd2d91" (UID: "871d427c-7323-4980-aa4f-d9c835dd2d91"). InnerVolumeSpecName "kube-api-access-t2pwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:55:07 crc kubenswrapper[4911]: I0929 21:55:07.734538 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/871d427c-7323-4980-aa4f-d9c835dd2d91-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "871d427c-7323-4980-aa4f-d9c835dd2d91" (UID: "871d427c-7323-4980-aa4f-d9c835dd2d91"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:55:07 crc kubenswrapper[4911]: I0929 21:55:07.764752 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/871d427c-7323-4980-aa4f-d9c835dd2d91-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "871d427c-7323-4980-aa4f-d9c835dd2d91" (UID: "871d427c-7323-4980-aa4f-d9c835dd2d91"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:55:07 crc kubenswrapper[4911]: I0929 21:55:07.811126 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/871d427c-7323-4980-aa4f-d9c835dd2d91-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 21:55:07 crc kubenswrapper[4911]: I0929 21:55:07.811155 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2pwk\" (UniqueName: \"kubernetes.io/projected/871d427c-7323-4980-aa4f-d9c835dd2d91-kube-api-access-t2pwk\") on node \"crc\" DevicePath \"\"" Sep 29 21:55:07 crc kubenswrapper[4911]: I0929 21:55:07.811165 4911 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/871d427c-7323-4980-aa4f-d9c835dd2d91-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.153256 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" event={"ID":"871d427c-7323-4980-aa4f-d9c835dd2d91","Type":"ContainerDied","Data":"3bf23ed2c61609527bea1f62bdefb8448acbe4fe82ce4a3b76ceb6b757a5c0ae"} Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.153295 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bf23ed2c61609527bea1f62bdefb8448acbe4fe82ce4a3b76ceb6b757a5c0ae" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.153344 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-ntqrx" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.281287 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp"] Sep 29 21:55:08 crc kubenswrapper[4911]: E0929 21:55:08.281909 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="871d427c-7323-4980-aa4f-d9c835dd2d91" containerName="ssh-known-hosts-edpm-deployment" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.281924 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="871d427c-7323-4980-aa4f-d9c835dd2d91" containerName="ssh-known-hosts-edpm-deployment" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.282112 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="871d427c-7323-4980-aa4f-d9c835dd2d91" containerName="ssh-known-hosts-edpm-deployment" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.282698 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.289212 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.289546 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.289641 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.289987 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp"] Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.290119 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.423925 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/97ff1dd2-9857-4a1f-879f-741477ecc4a8-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ddxcp\" (UID: \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.423965 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hktbb\" (UniqueName: \"kubernetes.io/projected/97ff1dd2-9857-4a1f-879f-741477ecc4a8-kube-api-access-hktbb\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ddxcp\" (UID: \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.424065 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97ff1dd2-9857-4a1f-879f-741477ecc4a8-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ddxcp\" (UID: \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.525688 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/97ff1dd2-9857-4a1f-879f-741477ecc4a8-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ddxcp\" (UID: \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.525762 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hktbb\" (UniqueName: \"kubernetes.io/projected/97ff1dd2-9857-4a1f-879f-741477ecc4a8-kube-api-access-hktbb\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ddxcp\" (UID: \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.526033 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97ff1dd2-9857-4a1f-879f-741477ecc4a8-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ddxcp\" (UID: \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.534771 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/97ff1dd2-9857-4a1f-879f-741477ecc4a8-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ddxcp\" (UID: \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.536056 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97ff1dd2-9857-4a1f-879f-741477ecc4a8-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ddxcp\" (UID: \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.548511 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hktbb\" (UniqueName: \"kubernetes.io/projected/97ff1dd2-9857-4a1f-879f-741477ecc4a8-kube-api-access-hktbb\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ddxcp\" (UID: \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:08 crc kubenswrapper[4911]: I0929 21:55:08.598394 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:09 crc kubenswrapper[4911]: I0929 21:55:09.014021 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp"] Sep 29 21:55:09 crc kubenswrapper[4911]: I0929 21:55:09.164266 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" event={"ID":"97ff1dd2-9857-4a1f-879f-741477ecc4a8","Type":"ContainerStarted","Data":"293fb70fe775b82c2496654618ce40181012a39df396fcc7247b923e5e88a7c1"} Sep 29 21:55:10 crc kubenswrapper[4911]: I0929 21:55:10.182334 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" event={"ID":"97ff1dd2-9857-4a1f-879f-741477ecc4a8","Type":"ContainerStarted","Data":"5d0165ac6a4421916e6cecc1cb7adb3790d8b76d96eafec77367239190baa0d5"} Sep 29 21:55:10 crc kubenswrapper[4911]: I0929 21:55:10.206161 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" podStartSLOduration=1.7368126099999999 podStartE2EDuration="2.206136415s" podCreationTimestamp="2025-09-29 21:55:08 +0000 UTC" firstStartedPulling="2025-09-29 21:55:09.020627707 +0000 UTC m=+1786.997740378" lastFinishedPulling="2025-09-29 21:55:09.489951472 +0000 UTC m=+1787.467064183" observedRunningTime="2025-09-29 21:55:10.204454944 +0000 UTC m=+1788.181567645" watchObservedRunningTime="2025-09-29 21:55:10.206136415 +0000 UTC m=+1788.183249146" Sep 29 21:55:19 crc kubenswrapper[4911]: I0929 21:55:19.310178 4911 generic.go:334] "Generic (PLEG): container finished" podID="97ff1dd2-9857-4a1f-879f-741477ecc4a8" containerID="5d0165ac6a4421916e6cecc1cb7adb3790d8b76d96eafec77367239190baa0d5" exitCode=0 Sep 29 21:55:19 crc kubenswrapper[4911]: I0929 21:55:19.310319 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" 
event={"ID":"97ff1dd2-9857-4a1f-879f-741477ecc4a8","Type":"ContainerDied","Data":"5d0165ac6a4421916e6cecc1cb7adb3790d8b76d96eafec77367239190baa0d5"} Sep 29 21:55:20 crc kubenswrapper[4911]: I0929 21:55:20.771321 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:20 crc kubenswrapper[4911]: I0929 21:55:20.924444 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/97ff1dd2-9857-4a1f-879f-741477ecc4a8-ssh-key\") pod \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\" (UID: \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\") " Sep 29 21:55:20 crc kubenswrapper[4911]: I0929 21:55:20.924570 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97ff1dd2-9857-4a1f-879f-741477ecc4a8-inventory\") pod \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\" (UID: \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\") " Sep 29 21:55:20 crc kubenswrapper[4911]: I0929 21:55:20.924623 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hktbb\" (UniqueName: \"kubernetes.io/projected/97ff1dd2-9857-4a1f-879f-741477ecc4a8-kube-api-access-hktbb\") pod \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\" (UID: \"97ff1dd2-9857-4a1f-879f-741477ecc4a8\") " Sep 29 21:55:20 crc kubenswrapper[4911]: I0929 21:55:20.931229 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97ff1dd2-9857-4a1f-879f-741477ecc4a8-kube-api-access-hktbb" (OuterVolumeSpecName: "kube-api-access-hktbb") pod "97ff1dd2-9857-4a1f-879f-741477ecc4a8" (UID: "97ff1dd2-9857-4a1f-879f-741477ecc4a8"). InnerVolumeSpecName "kube-api-access-hktbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:55:20 crc kubenswrapper[4911]: I0929 21:55:20.955008 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ff1dd2-9857-4a1f-879f-741477ecc4a8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "97ff1dd2-9857-4a1f-879f-741477ecc4a8" (UID: "97ff1dd2-9857-4a1f-879f-741477ecc4a8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:55:20 crc kubenswrapper[4911]: I0929 21:55:20.963148 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ff1dd2-9857-4a1f-879f-741477ecc4a8-inventory" (OuterVolumeSpecName: "inventory") pod "97ff1dd2-9857-4a1f-879f-741477ecc4a8" (UID: "97ff1dd2-9857-4a1f-879f-741477ecc4a8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.027340 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/97ff1dd2-9857-4a1f-879f-741477ecc4a8-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.027400 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/97ff1dd2-9857-4a1f-879f-741477ecc4a8-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.027423 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hktbb\" (UniqueName: \"kubernetes.io/projected/97ff1dd2-9857-4a1f-879f-741477ecc4a8-kube-api-access-hktbb\") on node \"crc\" DevicePath \"\"" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.333755 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" event={"ID":"97ff1dd2-9857-4a1f-879f-741477ecc4a8","Type":"ContainerDied","Data":"293fb70fe775b82c2496654618ce40181012a39df396fcc7247b923e5e88a7c1"} Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.334362 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="293fb70fe775b82c2496654618ce40181012a39df396fcc7247b923e5e88a7c1" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.334033 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ddxcp" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.427865 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp"] Sep 29 21:55:21 crc kubenswrapper[4911]: E0929 21:55:21.428346 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97ff1dd2-9857-4a1f-879f-741477ecc4a8" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.428367 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="97ff1dd2-9857-4a1f-879f-741477ecc4a8" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.428605 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="97ff1dd2-9857-4a1f-879f-741477ecc4a8" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.429372 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.433731 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.433889 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.434958 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp\" (UID: \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.435098 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp\" (UID: \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.435196 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqv6k\" (UniqueName: \"kubernetes.io/projected/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-kube-api-access-cqv6k\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp\" (UID: \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.436334 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.436385 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.445628 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp"] Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.537854 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp\" (UID: \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.538013 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp\" (UID: \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.538071 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqv6k\" (UniqueName: \"kubernetes.io/projected/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-kube-api-access-cqv6k\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp\" (UID: 
\"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.545142 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp\" (UID: \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.545904 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp\" (UID: \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.563672 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqv6k\" (UniqueName: \"kubernetes.io/projected/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-kube-api-access-cqv6k\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp\" (UID: \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:21 crc kubenswrapper[4911]: I0929 21:55:21.762155 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:22 crc kubenswrapper[4911]: I0929 21:55:22.140966 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp"] Sep 29 21:55:22 crc kubenswrapper[4911]: I0929 21:55:22.350491 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" event={"ID":"87480cd8-68c7-4315-ac1d-7c10d5fb6b79","Type":"ContainerStarted","Data":"8899ea633c9918b19b7b6e4dbf8d593946f02267da9690e5e984db080cde95b1"} Sep 29 21:55:23 crc kubenswrapper[4911]: I0929 21:55:23.362130 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" event={"ID":"87480cd8-68c7-4315-ac1d-7c10d5fb6b79","Type":"ContainerStarted","Data":"5092eeb3ea78296730032235e7710633de416c4dd19c8c192352b490bd814cc4"} Sep 29 21:55:23 crc kubenswrapper[4911]: I0929 21:55:23.390159 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" podStartSLOduration=1.976674711 podStartE2EDuration="2.390140674s" podCreationTimestamp="2025-09-29 21:55:21 +0000 UTC" firstStartedPulling="2025-09-29 21:55:22.152126528 +0000 UTC m=+1800.129239199" lastFinishedPulling="2025-09-29 21:55:22.565592491 +0000 UTC m=+1800.542705162" observedRunningTime="2025-09-29 21:55:23.388169293 +0000 UTC m=+1801.365282024" watchObservedRunningTime="2025-09-29 21:55:23.390140674 +0000 UTC m=+1801.367253355" Sep 29 21:55:33 crc kubenswrapper[4911]: I0929 21:55:33.475625 4911 generic.go:334] "Generic (PLEG): container finished" podID="87480cd8-68c7-4315-ac1d-7c10d5fb6b79" containerID="5092eeb3ea78296730032235e7710633de416c4dd19c8c192352b490bd814cc4" exitCode=0 Sep 29 21:55:33 crc kubenswrapper[4911]: I0929 21:55:33.475682 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" 
event={"ID":"87480cd8-68c7-4315-ac1d-7c10d5fb6b79","Type":"ContainerDied","Data":"5092eeb3ea78296730032235e7710633de416c4dd19c8c192352b490bd814cc4"} Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.022493 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.059975 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-ssh-key\") pod \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\" (UID: \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\") " Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.060060 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-inventory\") pod \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\" (UID: \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\") " Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.060227 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqv6k\" (UniqueName: \"kubernetes.io/projected/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-kube-api-access-cqv6k\") pod \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\" (UID: \"87480cd8-68c7-4315-ac1d-7c10d5fb6b79\") " Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.071072 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-kube-api-access-cqv6k" (OuterVolumeSpecName: "kube-api-access-cqv6k") pod "87480cd8-68c7-4315-ac1d-7c10d5fb6b79" (UID: "87480cd8-68c7-4315-ac1d-7c10d5fb6b79"). InnerVolumeSpecName "kube-api-access-cqv6k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.096661 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "87480cd8-68c7-4315-ac1d-7c10d5fb6b79" (UID: "87480cd8-68c7-4315-ac1d-7c10d5fb6b79"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.110983 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-inventory" (OuterVolumeSpecName: "inventory") pod "87480cd8-68c7-4315-ac1d-7c10d5fb6b79" (UID: "87480cd8-68c7-4315-ac1d-7c10d5fb6b79"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.163411 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqv6k\" (UniqueName: \"kubernetes.io/projected/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-kube-api-access-cqv6k\") on node \"crc\" DevicePath \"\"" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.163706 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.163950 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87480cd8-68c7-4315-ac1d-7c10d5fb6b79-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.503103 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" event={"ID":"87480cd8-68c7-4315-ac1d-7c10d5fb6b79","Type":"ContainerDied","Data":"8899ea633c9918b19b7b6e4dbf8d593946f02267da9690e5e984db080cde95b1"} Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.503780 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8899ea633c9918b19b7b6e4dbf8d593946f02267da9690e5e984db080cde95b1" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.503174 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.607304 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc"] Sep 29 21:55:35 crc kubenswrapper[4911]: E0929 21:55:35.607959 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87480cd8-68c7-4315-ac1d-7c10d5fb6b79" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.607988 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="87480cd8-68c7-4315-ac1d-7c10d5fb6b79" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.608336 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="87480cd8-68c7-4315-ac1d-7c10d5fb6b79" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.609221 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.612148 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.614306 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.615405 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.615490 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.616027 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.616184 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.616231 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.616955 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.632601 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc"] Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.677863 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678049 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678131 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678165 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-ovn-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678189 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678263 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678434 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678469 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678491 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678534 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678570 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: 
\"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678596 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678615 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqk2d\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-kube-api-access-hqk2d\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.678645 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780137 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780209 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780259 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780285 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780306 4911 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780325 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780346 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780364 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780380 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780410 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780438 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780458 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-libvirt-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780473 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqk2d\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-kube-api-access-hqk2d\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.780493 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.788069 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.788072 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.788498 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.790698 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.791206 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.793113 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.793442 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.795499 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.795670 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.796000 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.797467 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.802510 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.809434 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.810812 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqk2d\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-kube-api-access-hqk2d\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-slrkc\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:35 crc kubenswrapper[4911]: I0929 21:55:35.933954 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:55:36 crc kubenswrapper[4911]: I0929 21:55:36.512157 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc"] Sep 29 21:55:37 crc kubenswrapper[4911]: I0929 21:55:37.521920 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" event={"ID":"ca413103-e15f-4ee7-94fa-3f402c7393b2","Type":"ContainerStarted","Data":"dd62dff4db2db56a0b6f2e73fce3d1163c68cb17ae48a5d33af7e21207481aac"} Sep 29 21:55:37 crc kubenswrapper[4911]: I0929 21:55:37.522276 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" event={"ID":"ca413103-e15f-4ee7-94fa-3f402c7393b2","Type":"ContainerStarted","Data":"02723dce6dc2f685ac54af82536ec782e64abb07bfd4cc56fdd54c4f41d4439a"} Sep 29 21:55:37 crc kubenswrapper[4911]: I0929 21:55:37.552556 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" podStartSLOduration=1.991921818 podStartE2EDuration="2.55253492s" podCreationTimestamp="2025-09-29 21:55:35 +0000 UTC" firstStartedPulling="2025-09-29 21:55:36.526561357 +0000 UTC m=+1814.503674028" lastFinishedPulling="2025-09-29 21:55:37.087174459 +0000 UTC m=+1815.064287130" observedRunningTime="2025-09-29 21:55:37.544947744 +0000 UTC m=+1815.522060415" watchObservedRunningTime="2025-09-29 21:55:37.55253492 +0000 UTC m=+1815.529647591" Sep 29 21:56:22 crc kubenswrapper[4911]: I0929 21:56:22.042846 4911 generic.go:334] "Generic (PLEG): container finished" podID="ca413103-e15f-4ee7-94fa-3f402c7393b2" containerID="dd62dff4db2db56a0b6f2e73fce3d1163c68cb17ae48a5d33af7e21207481aac" exitCode=0 Sep 29 21:56:22 crc kubenswrapper[4911]: I0929 21:56:22.042951 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" event={"ID":"ca413103-e15f-4ee7-94fa-3f402c7393b2","Type":"ContainerDied","Data":"dd62dff4db2db56a0b6f2e73fce3d1163c68cb17ae48a5d33af7e21207481aac"} Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.514535 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.622258 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-nova-combined-ca-bundle\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.622341 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqk2d\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-kube-api-access-hqk2d\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.622434 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.622492 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.622910 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-repo-setup-combined-ca-bundle\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.623001 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.623049 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-neutron-metadata-combined-ca-bundle\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.623097 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-ovn-combined-ca-bundle\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.623153 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-inventory\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: 
\"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.623207 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-bootstrap-combined-ca-bundle\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.623254 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-libvirt-combined-ca-bundle\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.623295 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-ovn-default-certs-0\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.623335 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-telemetry-combined-ca-bundle\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.623396 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-ssh-key\") pod \"ca413103-e15f-4ee7-94fa-3f402c7393b2\" (UID: \"ca413103-e15f-4ee7-94fa-3f402c7393b2\") " Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.629576 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.630651 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.630653 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.631467 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.631646 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.632685 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.633397 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.633722 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.634376 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.635613 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-kube-api-access-hqk2d" (OuterVolumeSpecName: "kube-api-access-hqk2d") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "kube-api-access-hqk2d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.635721 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.635884 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.659907 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-inventory" (OuterVolumeSpecName: "inventory") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.675047 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ca413103-e15f-4ee7-94fa-3f402c7393b2" (UID: "ca413103-e15f-4ee7-94fa-3f402c7393b2"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.727906 4911 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.727947 4911 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.727962 4911 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.727981 4911 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.727998 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.728014 4911 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.728030 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqk2d\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-kube-api-access-hqk2d\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.728045 4911 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.728058 4911 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.728071 4911 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.728087 4911 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ca413103-e15f-4ee7-94fa-3f402c7393b2-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.728104 4911 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.728116 4911 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:23 crc kubenswrapper[4911]: I0929 21:56:23.728130 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca413103-e15f-4ee7-94fa-3f402c7393b2-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.070657 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" event={"ID":"ca413103-e15f-4ee7-94fa-3f402c7393b2","Type":"ContainerDied","Data":"02723dce6dc2f685ac54af82536ec782e64abb07bfd4cc56fdd54c4f41d4439a"} Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.070713 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02723dce6dc2f685ac54af82536ec782e64abb07bfd4cc56fdd54c4f41d4439a" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.070760 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-slrkc" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.247917 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5"] Sep 29 21:56:24 crc kubenswrapper[4911]: E0929 21:56:24.248476 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca413103-e15f-4ee7-94fa-3f402c7393b2" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.248504 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca413103-e15f-4ee7-94fa-3f402c7393b2" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.248758 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca413103-e15f-4ee7-94fa-3f402c7393b2" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.249745 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.253814 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.253885 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.253825 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.253928 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.254206 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.261465 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5"] Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.342172 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.342265 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.342461 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppx94\" (UniqueName: \"kubernetes.io/projected/9cecf675-656c-4eab-97c6-7fbd57ee26e8-kube-api-access-ppx94\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.342566 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.342699 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.444365 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" 
(UniqueName: \"kubernetes.io/configmap/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.444458 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.444494 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppx94\" (UniqueName: \"kubernetes.io/projected/9cecf675-656c-4eab-97c6-7fbd57ee26e8-kube-api-access-ppx94\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.444539 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.444612 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.446088 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.450009 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.451846 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.458442 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.464197 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppx94\" (UniqueName: \"kubernetes.io/projected/9cecf675-656c-4eab-97c6-7fbd57ee26e8-kube-api-access-ppx94\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-d6sx5\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:24 crc kubenswrapper[4911]: I0929 21:56:24.574735 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:56:25 crc kubenswrapper[4911]: I0929 21:56:25.208734 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5"] Sep 29 21:56:25 crc kubenswrapper[4911]: W0929 21:56:25.212188 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9cecf675_656c_4eab_97c6_7fbd57ee26e8.slice/crio-8c0c27e7a05dcf7ca21544df8e22a4caacd91a28b2954d505c52ccad6b80909f WatchSource:0}: Error finding container 8c0c27e7a05dcf7ca21544df8e22a4caacd91a28b2954d505c52ccad6b80909f: Status 404 returned error can't find the container with id 8c0c27e7a05dcf7ca21544df8e22a4caacd91a28b2954d505c52ccad6b80909f Sep 29 21:56:26 crc kubenswrapper[4911]: I0929 21:56:26.093315 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" event={"ID":"9cecf675-656c-4eab-97c6-7fbd57ee26e8","Type":"ContainerStarted","Data":"26e1c9a035265a5452b3d0344b5c6988138eec77d716b91db25bb6795d9265ad"} Sep 29 21:56:26 crc kubenswrapper[4911]: I0929 21:56:26.093571 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" event={"ID":"9cecf675-656c-4eab-97c6-7fbd57ee26e8","Type":"ContainerStarted","Data":"8c0c27e7a05dcf7ca21544df8e22a4caacd91a28b2954d505c52ccad6b80909f"} Sep 29 21:56:26 crc kubenswrapper[4911]: I0929 21:56:26.114579 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" podStartSLOduration=1.573798236 podStartE2EDuration="2.114558229s" podCreationTimestamp="2025-09-29 21:56:24 +0000 UTC" firstStartedPulling="2025-09-29 21:56:25.215331029 +0000 UTC m=+1863.192443740" lastFinishedPulling="2025-09-29 21:56:25.756091022 +0000 UTC m=+1863.733203733" observedRunningTime="2025-09-29 21:56:26.109715809 +0000 UTC m=+1864.086828480" watchObservedRunningTime="2025-09-29 21:56:26.114558229 +0000 UTC m=+1864.091670940" Sep 29 21:57:25 crc kubenswrapper[4911]: I0929 21:57:25.211644 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:57:25 crc kubenswrapper[4911]: I0929 21:57:25.212387 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:57:36 crc kubenswrapper[4911]: I0929 21:57:36.834250 4911 generic.go:334] 
"Generic (PLEG): container finished" podID="9cecf675-656c-4eab-97c6-7fbd57ee26e8" containerID="26e1c9a035265a5452b3d0344b5c6988138eec77d716b91db25bb6795d9265ad" exitCode=0 Sep 29 21:57:36 crc kubenswrapper[4911]: I0929 21:57:36.834374 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" event={"ID":"9cecf675-656c-4eab-97c6-7fbd57ee26e8","Type":"ContainerDied","Data":"26e1c9a035265a5452b3d0344b5c6988138eec77d716b91db25bb6795d9265ad"} Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.360991 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.501099 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-inventory\") pod \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.501158 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppx94\" (UniqueName: \"kubernetes.io/projected/9cecf675-656c-4eab-97c6-7fbd57ee26e8-kube-api-access-ppx94\") pod \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.501221 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ssh-key\") pod \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.501307 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ovncontroller-config-0\") pod \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.501381 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ovn-combined-ca-bundle\") pod \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\" (UID: \"9cecf675-656c-4eab-97c6-7fbd57ee26e8\") " Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.507991 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cecf675-656c-4eab-97c6-7fbd57ee26e8-kube-api-access-ppx94" (OuterVolumeSpecName: "kube-api-access-ppx94") pod "9cecf675-656c-4eab-97c6-7fbd57ee26e8" (UID: "9cecf675-656c-4eab-97c6-7fbd57ee26e8"). InnerVolumeSpecName "kube-api-access-ppx94". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.508951 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "9cecf675-656c-4eab-97c6-7fbd57ee26e8" (UID: "9cecf675-656c-4eab-97c6-7fbd57ee26e8"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.533985 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9cecf675-656c-4eab-97c6-7fbd57ee26e8" (UID: "9cecf675-656c-4eab-97c6-7fbd57ee26e8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.542415 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "9cecf675-656c-4eab-97c6-7fbd57ee26e8" (UID: "9cecf675-656c-4eab-97c6-7fbd57ee26e8"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.544566 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-inventory" (OuterVolumeSpecName: "inventory") pod "9cecf675-656c-4eab-97c6-7fbd57ee26e8" (UID: "9cecf675-656c-4eab-97c6-7fbd57ee26e8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.603922 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.603975 4911 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.603996 4911 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.604015 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cecf675-656c-4eab-97c6-7fbd57ee26e8-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.604033 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppx94\" (UniqueName: \"kubernetes.io/projected/9cecf675-656c-4eab-97c6-7fbd57ee26e8-kube-api-access-ppx94\") on node \"crc\" DevicePath \"\"" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.859260 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" event={"ID":"9cecf675-656c-4eab-97c6-7fbd57ee26e8","Type":"ContainerDied","Data":"8c0c27e7a05dcf7ca21544df8e22a4caacd91a28b2954d505c52ccad6b80909f"} Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.859527 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c0c27e7a05dcf7ca21544df8e22a4caacd91a28b2954d505c52ccad6b80909f" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.859314 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-d6sx5" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.985494 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq"] Sep 29 21:57:38 crc kubenswrapper[4911]: E0929 21:57:38.985982 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cecf675-656c-4eab-97c6-7fbd57ee26e8" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.986005 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cecf675-656c-4eab-97c6-7fbd57ee26e8" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.986227 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cecf675-656c-4eab-97c6-7fbd57ee26e8" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.986952 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:38 crc kubenswrapper[4911]: I0929 21:57:38.996251 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.000731 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.001013 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.001544 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.002185 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.002279 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.009750 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq"] Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.116696 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.116783 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85gt4\" (UniqueName: \"kubernetes.io/projected/b40f1414-088a-40e3-a07c-041c6e461771-kube-api-access-85gt4\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.116935 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.117082 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.117109 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.117125 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.218798 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.218867 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85gt4\" (UniqueName: \"kubernetes.io/projected/b40f1414-088a-40e3-a07c-041c6e461771-kube-api-access-85gt4\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.218915 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.219015 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.219040 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.219059 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.223631 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.223699 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.225196 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.228219 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.228635 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.237248 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85gt4\" (UniqueName: 
\"kubernetes.io/projected/b40f1414-088a-40e3-a07c-041c6e461771-kube-api-access-85gt4\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.335454 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.654668 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq"] Sep 29 21:57:39 crc kubenswrapper[4911]: I0929 21:57:39.868705 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" event={"ID":"b40f1414-088a-40e3-a07c-041c6e461771","Type":"ContainerStarted","Data":"b53c0134ddc7d2249b9a96bc1417561331a48b3aba211ddf4442bf3953ac11d1"} Sep 29 21:57:40 crc kubenswrapper[4911]: I0929 21:57:40.882449 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" event={"ID":"b40f1414-088a-40e3-a07c-041c6e461771","Type":"ContainerStarted","Data":"817d6f5d5c40b6e7cb0ca7cb71db173559083642bf1db6306b3c35180ed91dd4"} Sep 29 21:57:40 crc kubenswrapper[4911]: I0929 21:57:40.916580 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" podStartSLOduration=2.45608774 podStartE2EDuration="2.91655248s" podCreationTimestamp="2025-09-29 21:57:38 +0000 UTC" firstStartedPulling="2025-09-29 21:57:39.665974269 +0000 UTC m=+1937.643086940" lastFinishedPulling="2025-09-29 21:57:40.126439009 +0000 UTC m=+1938.103551680" observedRunningTime="2025-09-29 21:57:40.909688016 +0000 UTC m=+1938.886800787" watchObservedRunningTime="2025-09-29 21:57:40.91655248 +0000 UTC m=+1938.893665191" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.334468 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wmx9x"] Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.338333 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.346183 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wmx9x"] Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.456544 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/714a8b68-402c-4f86-b995-ac8cc8849b21-catalog-content\") pod \"redhat-operators-wmx9x\" (UID: \"714a8b68-402c-4f86-b995-ac8cc8849b21\") " pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.457222 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnpql\" (UniqueName: \"kubernetes.io/projected/714a8b68-402c-4f86-b995-ac8cc8849b21-kube-api-access-gnpql\") pod \"redhat-operators-wmx9x\" (UID: \"714a8b68-402c-4f86-b995-ac8cc8849b21\") " pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.457963 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/714a8b68-402c-4f86-b995-ac8cc8849b21-utilities\") pod \"redhat-operators-wmx9x\" (UID: \"714a8b68-402c-4f86-b995-ac8cc8849b21\") " pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.527953 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-p9hz8"] Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.530921 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.544902 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p9hz8"] Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.560398 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hwcr\" (UniqueName: \"kubernetes.io/projected/082c9964-6925-4d67-b4fc-33f56c957dda-kube-api-access-8hwcr\") pod \"community-operators-p9hz8\" (UID: \"082c9964-6925-4d67-b4fc-33f56c957dda\") " pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.560607 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/714a8b68-402c-4f86-b995-ac8cc8849b21-utilities\") pod \"redhat-operators-wmx9x\" (UID: \"714a8b68-402c-4f86-b995-ac8cc8849b21\") " pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.560740 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/082c9964-6925-4d67-b4fc-33f56c957dda-catalog-content\") pod \"community-operators-p9hz8\" (UID: \"082c9964-6925-4d67-b4fc-33f56c957dda\") " pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.560801 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/714a8b68-402c-4f86-b995-ac8cc8849b21-catalog-content\") pod \"redhat-operators-wmx9x\" (UID: 
\"714a8b68-402c-4f86-b995-ac8cc8849b21\") " pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.560864 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnpql\" (UniqueName: \"kubernetes.io/projected/714a8b68-402c-4f86-b995-ac8cc8849b21-kube-api-access-gnpql\") pod \"redhat-operators-wmx9x\" (UID: \"714a8b68-402c-4f86-b995-ac8cc8849b21\") " pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.560903 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/082c9964-6925-4d67-b4fc-33f56c957dda-utilities\") pod \"community-operators-p9hz8\" (UID: \"082c9964-6925-4d67-b4fc-33f56c957dda\") " pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.561122 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/714a8b68-402c-4f86-b995-ac8cc8849b21-utilities\") pod \"redhat-operators-wmx9x\" (UID: \"714a8b68-402c-4f86-b995-ac8cc8849b21\") " pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.561175 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/714a8b68-402c-4f86-b995-ac8cc8849b21-catalog-content\") pod \"redhat-operators-wmx9x\" (UID: \"714a8b68-402c-4f86-b995-ac8cc8849b21\") " pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.587319 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnpql\" (UniqueName: \"kubernetes.io/projected/714a8b68-402c-4f86-b995-ac8cc8849b21-kube-api-access-gnpql\") pod \"redhat-operators-wmx9x\" (UID: \"714a8b68-402c-4f86-b995-ac8cc8849b21\") " pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.662573 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/082c9964-6925-4d67-b4fc-33f56c957dda-utilities\") pod \"community-operators-p9hz8\" (UID: \"082c9964-6925-4d67-b4fc-33f56c957dda\") " pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.662666 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hwcr\" (UniqueName: \"kubernetes.io/projected/082c9964-6925-4d67-b4fc-33f56c957dda-kube-api-access-8hwcr\") pod \"community-operators-p9hz8\" (UID: \"082c9964-6925-4d67-b4fc-33f56c957dda\") " pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.662833 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/082c9964-6925-4d67-b4fc-33f56c957dda-catalog-content\") pod \"community-operators-p9hz8\" (UID: \"082c9964-6925-4d67-b4fc-33f56c957dda\") " pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.663199 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/082c9964-6925-4d67-b4fc-33f56c957dda-utilities\") pod \"community-operators-p9hz8\" (UID: 
\"082c9964-6925-4d67-b4fc-33f56c957dda\") " pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.663220 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/082c9964-6925-4d67-b4fc-33f56c957dda-catalog-content\") pod \"community-operators-p9hz8\" (UID: \"082c9964-6925-4d67-b4fc-33f56c957dda\") " pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.681889 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.689499 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hwcr\" (UniqueName: \"kubernetes.io/projected/082c9964-6925-4d67-b4fc-33f56c957dda-kube-api-access-8hwcr\") pod \"community-operators-p9hz8\" (UID: \"082c9964-6925-4d67-b4fc-33f56c957dda\") " pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:57:50 crc kubenswrapper[4911]: I0929 21:57:50.854948 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:57:51 crc kubenswrapper[4911]: I0929 21:57:51.036481 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wmx9x"] Sep 29 21:57:51 crc kubenswrapper[4911]: I0929 21:57:51.491904 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p9hz8"] Sep 29 21:57:51 crc kubenswrapper[4911]: W0929 21:57:51.497704 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod082c9964_6925_4d67_b4fc_33f56c957dda.slice/crio-1a5936de85efd6d65532bd7d88a1bb64ab01a5a3c4b95f74bbe4f515efcc7704 WatchSource:0}: Error finding container 1a5936de85efd6d65532bd7d88a1bb64ab01a5a3c4b95f74bbe4f515efcc7704: Status 404 returned error can't find the container with id 1a5936de85efd6d65532bd7d88a1bb64ab01a5a3c4b95f74bbe4f515efcc7704 Sep 29 21:57:52 crc kubenswrapper[4911]: I0929 21:57:52.023411 4911 generic.go:334] "Generic (PLEG): container finished" podID="714a8b68-402c-4f86-b995-ac8cc8849b21" containerID="93ce40c7250047f415751d9745c4fee9832f8579668e08727eba974bf6b8639f" exitCode=0 Sep 29 21:57:52 crc kubenswrapper[4911]: I0929 21:57:52.023532 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wmx9x" event={"ID":"714a8b68-402c-4f86-b995-ac8cc8849b21","Type":"ContainerDied","Data":"93ce40c7250047f415751d9745c4fee9832f8579668e08727eba974bf6b8639f"} Sep 29 21:57:52 crc kubenswrapper[4911]: I0929 21:57:52.023821 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wmx9x" event={"ID":"714a8b68-402c-4f86-b995-ac8cc8849b21","Type":"ContainerStarted","Data":"dbace4d5a8b9729671407ae8f36eb584b100bd2a1afb5974c920e308db5fba7f"} Sep 29 21:57:52 crc kubenswrapper[4911]: I0929 21:57:52.025523 4911 generic.go:334] "Generic (PLEG): container finished" podID="082c9964-6925-4d67-b4fc-33f56c957dda" containerID="697ef491cc86986d82f26e1f94d4a36e1a951be66b7dee1173458a8d5b68c4ef" exitCode=0 Sep 29 21:57:52 crc kubenswrapper[4911]: I0929 21:57:52.025553 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p9hz8" 
event={"ID":"082c9964-6925-4d67-b4fc-33f56c957dda","Type":"ContainerDied","Data":"697ef491cc86986d82f26e1f94d4a36e1a951be66b7dee1173458a8d5b68c4ef"} Sep 29 21:57:52 crc kubenswrapper[4911]: I0929 21:57:52.025572 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p9hz8" event={"ID":"082c9964-6925-4d67-b4fc-33f56c957dda","Type":"ContainerStarted","Data":"1a5936de85efd6d65532bd7d88a1bb64ab01a5a3c4b95f74bbe4f515efcc7704"} Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.036413 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p9hz8" event={"ID":"082c9964-6925-4d67-b4fc-33f56c957dda","Type":"ContainerStarted","Data":"3057f45ac71d060f3cbf7f71e061d70d3e342ddeb846a42c7132c6f600388701"} Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.041162 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wmx9x" event={"ID":"714a8b68-402c-4f86-b995-ac8cc8849b21","Type":"ContainerStarted","Data":"eec775fbc5cde4db188f46da8ce82845728fd47851699fc98858ab9353cbd110"} Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.737782 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xvjqv"] Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.742149 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.748564 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvjqv"] Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.817948 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grqwd\" (UniqueName: \"kubernetes.io/projected/d4e66ca0-22c9-44b1-88fc-35b469649d8a-kube-api-access-grqwd\") pod \"redhat-marketplace-xvjqv\" (UID: \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\") " pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.818373 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4e66ca0-22c9-44b1-88fc-35b469649d8a-utilities\") pod \"redhat-marketplace-xvjqv\" (UID: \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\") " pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.818515 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4e66ca0-22c9-44b1-88fc-35b469649d8a-catalog-content\") pod \"redhat-marketplace-xvjqv\" (UID: \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\") " pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.919671 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4e66ca0-22c9-44b1-88fc-35b469649d8a-catalog-content\") pod \"redhat-marketplace-xvjqv\" (UID: \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\") " pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.919806 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grqwd\" (UniqueName: 
\"kubernetes.io/projected/d4e66ca0-22c9-44b1-88fc-35b469649d8a-kube-api-access-grqwd\") pod \"redhat-marketplace-xvjqv\" (UID: \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\") " pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.919828 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4e66ca0-22c9-44b1-88fc-35b469649d8a-utilities\") pod \"redhat-marketplace-xvjqv\" (UID: \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\") " pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.920278 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4e66ca0-22c9-44b1-88fc-35b469649d8a-utilities\") pod \"redhat-marketplace-xvjqv\" (UID: \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\") " pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.921841 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4e66ca0-22c9-44b1-88fc-35b469649d8a-catalog-content\") pod \"redhat-marketplace-xvjqv\" (UID: \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\") " pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:57:53 crc kubenswrapper[4911]: I0929 21:57:53.946114 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grqwd\" (UniqueName: \"kubernetes.io/projected/d4e66ca0-22c9-44b1-88fc-35b469649d8a-kube-api-access-grqwd\") pod \"redhat-marketplace-xvjqv\" (UID: \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\") " pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:57:54 crc kubenswrapper[4911]: I0929 21:57:54.076048 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:57:54 crc kubenswrapper[4911]: W0929 21:57:54.553934 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4e66ca0_22c9_44b1_88fc_35b469649d8a.slice/crio-27d990b85dddb926da11683ea732c9785c9c962193a63702b96369203dc12085 WatchSource:0}: Error finding container 27d990b85dddb926da11683ea732c9785c9c962193a63702b96369203dc12085: Status 404 returned error can't find the container with id 27d990b85dddb926da11683ea732c9785c9c962193a63702b96369203dc12085 Sep 29 21:57:54 crc kubenswrapper[4911]: I0929 21:57:54.562684 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvjqv"] Sep 29 21:57:55 crc kubenswrapper[4911]: I0929 21:57:55.060667 4911 generic.go:334] "Generic (PLEG): container finished" podID="082c9964-6925-4d67-b4fc-33f56c957dda" containerID="3057f45ac71d060f3cbf7f71e061d70d3e342ddeb846a42c7132c6f600388701" exitCode=0 Sep 29 21:57:55 crc kubenswrapper[4911]: I0929 21:57:55.060759 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p9hz8" event={"ID":"082c9964-6925-4d67-b4fc-33f56c957dda","Type":"ContainerDied","Data":"3057f45ac71d060f3cbf7f71e061d70d3e342ddeb846a42c7132c6f600388701"} Sep 29 21:57:55 crc kubenswrapper[4911]: I0929 21:57:55.063592 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvjqv" event={"ID":"d4e66ca0-22c9-44b1-88fc-35b469649d8a","Type":"ContainerStarted","Data":"6fde27c55bdb9189918a480b00cbd38e5164f4f0f38097cfdcdbf95994f97976"} Sep 29 21:57:55 crc kubenswrapper[4911]: I0929 21:57:55.063631 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvjqv" event={"ID":"d4e66ca0-22c9-44b1-88fc-35b469649d8a","Type":"ContainerStarted","Data":"27d990b85dddb926da11683ea732c9785c9c962193a63702b96369203dc12085"} Sep 29 21:57:55 crc kubenswrapper[4911]: I0929 21:57:55.068954 4911 generic.go:334] "Generic (PLEG): container finished" podID="714a8b68-402c-4f86-b995-ac8cc8849b21" containerID="eec775fbc5cde4db188f46da8ce82845728fd47851699fc98858ab9353cbd110" exitCode=0 Sep 29 21:57:55 crc kubenswrapper[4911]: I0929 21:57:55.068989 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wmx9x" event={"ID":"714a8b68-402c-4f86-b995-ac8cc8849b21","Type":"ContainerDied","Data":"eec775fbc5cde4db188f46da8ce82845728fd47851699fc98858ab9353cbd110"} Sep 29 21:57:55 crc kubenswrapper[4911]: I0929 21:57:55.211091 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:57:55 crc kubenswrapper[4911]: I0929 21:57:55.211187 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:57:57 crc kubenswrapper[4911]: I0929 21:57:57.092751 4911 generic.go:334] "Generic (PLEG): container finished" podID="d4e66ca0-22c9-44b1-88fc-35b469649d8a" 
containerID="6fde27c55bdb9189918a480b00cbd38e5164f4f0f38097cfdcdbf95994f97976" exitCode=0 Sep 29 21:57:57 crc kubenswrapper[4911]: I0929 21:57:57.092871 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvjqv" event={"ID":"d4e66ca0-22c9-44b1-88fc-35b469649d8a","Type":"ContainerDied","Data":"6fde27c55bdb9189918a480b00cbd38e5164f4f0f38097cfdcdbf95994f97976"} Sep 29 21:57:57 crc kubenswrapper[4911]: I0929 21:57:57.099498 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wmx9x" event={"ID":"714a8b68-402c-4f86-b995-ac8cc8849b21","Type":"ContainerStarted","Data":"98a733a39a5bc6900519aa3c69f18876e629ab4db595b958d1d692be4e7434d6"} Sep 29 21:57:57 crc kubenswrapper[4911]: I0929 21:57:57.106214 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p9hz8" event={"ID":"082c9964-6925-4d67-b4fc-33f56c957dda","Type":"ContainerStarted","Data":"c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a"} Sep 29 21:57:57 crc kubenswrapper[4911]: I0929 21:57:57.149179 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wmx9x" podStartSLOduration=2.973582904 podStartE2EDuration="7.149159321s" podCreationTimestamp="2025-09-29 21:57:50 +0000 UTC" firstStartedPulling="2025-09-29 21:57:52.026095005 +0000 UTC m=+1950.003207686" lastFinishedPulling="2025-09-29 21:57:56.201671432 +0000 UTC m=+1954.178784103" observedRunningTime="2025-09-29 21:57:57.146682294 +0000 UTC m=+1955.123794985" watchObservedRunningTime="2025-09-29 21:57:57.149159321 +0000 UTC m=+1955.126272012" Sep 29 21:57:57 crc kubenswrapper[4911]: I0929 21:57:57.171068 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-p9hz8" podStartSLOduration=3.041846867 podStartE2EDuration="7.171052035s" podCreationTimestamp="2025-09-29 21:57:50 +0000 UTC" firstStartedPulling="2025-09-29 21:57:52.043282212 +0000 UTC m=+1950.020394903" lastFinishedPulling="2025-09-29 21:57:56.1724874 +0000 UTC m=+1954.149600071" observedRunningTime="2025-09-29 21:57:57.163114587 +0000 UTC m=+1955.140227288" watchObservedRunningTime="2025-09-29 21:57:57.171052035 +0000 UTC m=+1955.148164716" Sep 29 21:57:59 crc kubenswrapper[4911]: I0929 21:57:59.137442 4911 generic.go:334] "Generic (PLEG): container finished" podID="d4e66ca0-22c9-44b1-88fc-35b469649d8a" containerID="669453d3db543a92a19c15eb7fde1f13b3b2a7f8ef4815e3ad8344b114256d58" exitCode=0 Sep 29 21:57:59 crc kubenswrapper[4911]: I0929 21:57:59.137515 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvjqv" event={"ID":"d4e66ca0-22c9-44b1-88fc-35b469649d8a","Type":"ContainerDied","Data":"669453d3db543a92a19c15eb7fde1f13b3b2a7f8ef4815e3ad8344b114256d58"} Sep 29 21:58:00 crc kubenswrapper[4911]: I0929 21:58:00.147179 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvjqv" event={"ID":"d4e66ca0-22c9-44b1-88fc-35b469649d8a","Type":"ContainerStarted","Data":"e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5"} Sep 29 21:58:00 crc kubenswrapper[4911]: I0929 21:58:00.172644 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xvjqv" podStartSLOduration=4.645818161 podStartE2EDuration="7.172593294s" podCreationTimestamp="2025-09-29 21:57:53 +0000 UTC" 
firstStartedPulling="2025-09-29 21:57:57.095233515 +0000 UTC m=+1955.072346186" lastFinishedPulling="2025-09-29 21:57:59.622008648 +0000 UTC m=+1957.599121319" observedRunningTime="2025-09-29 21:58:00.166979229 +0000 UTC m=+1958.144091920" watchObservedRunningTime="2025-09-29 21:58:00.172593294 +0000 UTC m=+1958.149705975" Sep 29 21:58:00 crc kubenswrapper[4911]: I0929 21:58:00.682928 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:58:00 crc kubenswrapper[4911]: I0929 21:58:00.682998 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:58:00 crc kubenswrapper[4911]: I0929 21:58:00.855432 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:58:00 crc kubenswrapper[4911]: I0929 21:58:00.855830 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:58:00 crc kubenswrapper[4911]: I0929 21:58:00.912967 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:58:01 crc kubenswrapper[4911]: I0929 21:58:01.231015 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:58:01 crc kubenswrapper[4911]: I0929 21:58:01.732863 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wmx9x" podUID="714a8b68-402c-4f86-b995-ac8cc8849b21" containerName="registry-server" probeResult="failure" output=< Sep 29 21:58:01 crc kubenswrapper[4911]: timeout: failed to connect service ":50051" within 1s Sep 29 21:58:01 crc kubenswrapper[4911]: > Sep 29 21:58:02 crc kubenswrapper[4911]: I0929 21:58:02.123371 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p9hz8"] Sep 29 21:58:03 crc kubenswrapper[4911]: I0929 21:58:03.182145 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-p9hz8" podUID="082c9964-6925-4d67-b4fc-33f56c957dda" containerName="registry-server" containerID="cri-o://c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a" gracePeriod=2 Sep 29 21:58:03 crc kubenswrapper[4911]: I0929 21:58:03.664585 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:58:03 crc kubenswrapper[4911]: I0929 21:58:03.836851 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/082c9964-6925-4d67-b4fc-33f56c957dda-utilities\") pod \"082c9964-6925-4d67-b4fc-33f56c957dda\" (UID: \"082c9964-6925-4d67-b4fc-33f56c957dda\") " Sep 29 21:58:03 crc kubenswrapper[4911]: I0929 21:58:03.837041 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/082c9964-6925-4d67-b4fc-33f56c957dda-catalog-content\") pod \"082c9964-6925-4d67-b4fc-33f56c957dda\" (UID: \"082c9964-6925-4d67-b4fc-33f56c957dda\") " Sep 29 21:58:03 crc kubenswrapper[4911]: I0929 21:58:03.837176 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hwcr\" (UniqueName: \"kubernetes.io/projected/082c9964-6925-4d67-b4fc-33f56c957dda-kube-api-access-8hwcr\") pod \"082c9964-6925-4d67-b4fc-33f56c957dda\" (UID: \"082c9964-6925-4d67-b4fc-33f56c957dda\") " Sep 29 21:58:03 crc kubenswrapper[4911]: I0929 21:58:03.838171 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/082c9964-6925-4d67-b4fc-33f56c957dda-utilities" (OuterVolumeSpecName: "utilities") pod "082c9964-6925-4d67-b4fc-33f56c957dda" (UID: "082c9964-6925-4d67-b4fc-33f56c957dda"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:58:03 crc kubenswrapper[4911]: I0929 21:58:03.838726 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/082c9964-6925-4d67-b4fc-33f56c957dda-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:03 crc kubenswrapper[4911]: I0929 21:58:03.844064 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/082c9964-6925-4d67-b4fc-33f56c957dda-kube-api-access-8hwcr" (OuterVolumeSpecName: "kube-api-access-8hwcr") pod "082c9964-6925-4d67-b4fc-33f56c957dda" (UID: "082c9964-6925-4d67-b4fc-33f56c957dda"). InnerVolumeSpecName "kube-api-access-8hwcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:58:03 crc kubenswrapper[4911]: I0929 21:58:03.881264 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/082c9964-6925-4d67-b4fc-33f56c957dda-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "082c9964-6925-4d67-b4fc-33f56c957dda" (UID: "082c9964-6925-4d67-b4fc-33f56c957dda"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:58:03 crc kubenswrapper[4911]: I0929 21:58:03.941222 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/082c9964-6925-4d67-b4fc-33f56c957dda-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:03 crc kubenswrapper[4911]: I0929 21:58:03.941513 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hwcr\" (UniqueName: \"kubernetes.io/projected/082c9964-6925-4d67-b4fc-33f56c957dda-kube-api-access-8hwcr\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.076887 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.077248 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.132692 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.194303 4911 generic.go:334] "Generic (PLEG): container finished" podID="082c9964-6925-4d67-b4fc-33f56c957dda" containerID="c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a" exitCode=0 Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.194372 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p9hz8" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.194427 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p9hz8" event={"ID":"082c9964-6925-4d67-b4fc-33f56c957dda","Type":"ContainerDied","Data":"c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a"} Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.194500 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p9hz8" event={"ID":"082c9964-6925-4d67-b4fc-33f56c957dda","Type":"ContainerDied","Data":"1a5936de85efd6d65532bd7d88a1bb64ab01a5a3c4b95f74bbe4f515efcc7704"} Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.194534 4911 scope.go:117] "RemoveContainer" containerID="c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.218155 4911 scope.go:117] "RemoveContainer" containerID="3057f45ac71d060f3cbf7f71e061d70d3e342ddeb846a42c7132c6f600388701" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.245621 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.255364 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p9hz8"] Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.255861 4911 scope.go:117] "RemoveContainer" containerID="697ef491cc86986d82f26e1f94d4a36e1a951be66b7dee1173458a8d5b68c4ef" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.263686 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-p9hz8"] Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.287008 4911 scope.go:117] "RemoveContainer" containerID="c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a" Sep 29 21:58:04 crc 
kubenswrapper[4911]: E0929 21:58:04.287459 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a\": container with ID starting with c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a not found: ID does not exist" containerID="c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.287503 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a"} err="failed to get container status \"c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a\": rpc error: code = NotFound desc = could not find container \"c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a\": container with ID starting with c1dcc278660b1140f515ac145ea44b59322c8145923517fb3462a281268c4a5a not found: ID does not exist" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.287536 4911 scope.go:117] "RemoveContainer" containerID="3057f45ac71d060f3cbf7f71e061d70d3e342ddeb846a42c7132c6f600388701" Sep 29 21:58:04 crc kubenswrapper[4911]: E0929 21:58:04.288034 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3057f45ac71d060f3cbf7f71e061d70d3e342ddeb846a42c7132c6f600388701\": container with ID starting with 3057f45ac71d060f3cbf7f71e061d70d3e342ddeb846a42c7132c6f600388701 not found: ID does not exist" containerID="3057f45ac71d060f3cbf7f71e061d70d3e342ddeb846a42c7132c6f600388701" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.288075 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3057f45ac71d060f3cbf7f71e061d70d3e342ddeb846a42c7132c6f600388701"} err="failed to get container status \"3057f45ac71d060f3cbf7f71e061d70d3e342ddeb846a42c7132c6f600388701\": rpc error: code = NotFound desc = could not find container \"3057f45ac71d060f3cbf7f71e061d70d3e342ddeb846a42c7132c6f600388701\": container with ID starting with 3057f45ac71d060f3cbf7f71e061d70d3e342ddeb846a42c7132c6f600388701 not found: ID does not exist" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.288101 4911 scope.go:117] "RemoveContainer" containerID="697ef491cc86986d82f26e1f94d4a36e1a951be66b7dee1173458a8d5b68c4ef" Sep 29 21:58:04 crc kubenswrapper[4911]: E0929 21:58:04.288356 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"697ef491cc86986d82f26e1f94d4a36e1a951be66b7dee1173458a8d5b68c4ef\": container with ID starting with 697ef491cc86986d82f26e1f94d4a36e1a951be66b7dee1173458a8d5b68c4ef not found: ID does not exist" containerID="697ef491cc86986d82f26e1f94d4a36e1a951be66b7dee1173458a8d5b68c4ef" Sep 29 21:58:04 crc kubenswrapper[4911]: I0929 21:58:04.288390 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"697ef491cc86986d82f26e1f94d4a36e1a951be66b7dee1173458a8d5b68c4ef"} err="failed to get container status \"697ef491cc86986d82f26e1f94d4a36e1a951be66b7dee1173458a8d5b68c4ef\": rpc error: code = NotFound desc = could not find container \"697ef491cc86986d82f26e1f94d4a36e1a951be66b7dee1173458a8d5b68c4ef\": container with ID starting with 697ef491cc86986d82f26e1f94d4a36e1a951be66b7dee1173458a8d5b68c4ef not found: ID does not exist" Sep 29 21:58:04 crc kubenswrapper[4911]: 
I0929 21:58:04.711551 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="082c9964-6925-4d67-b4fc-33f56c957dda" path="/var/lib/kubelet/pods/082c9964-6925-4d67-b4fc-33f56c957dda/volumes" Sep 29 21:58:08 crc kubenswrapper[4911]: I0929 21:58:08.126880 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvjqv"] Sep 29 21:58:08 crc kubenswrapper[4911]: I0929 21:58:08.128053 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xvjqv" podUID="d4e66ca0-22c9-44b1-88fc-35b469649d8a" containerName="registry-server" containerID="cri-o://e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5" gracePeriod=2 Sep 29 21:58:08 crc kubenswrapper[4911]: I0929 21:58:08.567905 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:58:08 crc kubenswrapper[4911]: I0929 21:58:08.634502 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4e66ca0-22c9-44b1-88fc-35b469649d8a-catalog-content\") pod \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\" (UID: \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\") " Sep 29 21:58:08 crc kubenswrapper[4911]: I0929 21:58:08.634597 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grqwd\" (UniqueName: \"kubernetes.io/projected/d4e66ca0-22c9-44b1-88fc-35b469649d8a-kube-api-access-grqwd\") pod \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\" (UID: \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\") " Sep 29 21:58:08 crc kubenswrapper[4911]: I0929 21:58:08.634633 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4e66ca0-22c9-44b1-88fc-35b469649d8a-utilities\") pod \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\" (UID: \"d4e66ca0-22c9-44b1-88fc-35b469649d8a\") " Sep 29 21:58:08 crc kubenswrapper[4911]: I0929 21:58:08.635356 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4e66ca0-22c9-44b1-88fc-35b469649d8a-utilities" (OuterVolumeSpecName: "utilities") pod "d4e66ca0-22c9-44b1-88fc-35b469649d8a" (UID: "d4e66ca0-22c9-44b1-88fc-35b469649d8a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:58:08 crc kubenswrapper[4911]: I0929 21:58:08.635809 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4e66ca0-22c9-44b1-88fc-35b469649d8a-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:08 crc kubenswrapper[4911]: I0929 21:58:08.644340 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4e66ca0-22c9-44b1-88fc-35b469649d8a-kube-api-access-grqwd" (OuterVolumeSpecName: "kube-api-access-grqwd") pod "d4e66ca0-22c9-44b1-88fc-35b469649d8a" (UID: "d4e66ca0-22c9-44b1-88fc-35b469649d8a"). InnerVolumeSpecName "kube-api-access-grqwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:58:08 crc kubenswrapper[4911]: I0929 21:58:08.655773 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4e66ca0-22c9-44b1-88fc-35b469649d8a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4e66ca0-22c9-44b1-88fc-35b469649d8a" (UID: "d4e66ca0-22c9-44b1-88fc-35b469649d8a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:58:08 crc kubenswrapper[4911]: I0929 21:58:08.737984 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4e66ca0-22c9-44b1-88fc-35b469649d8a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:08 crc kubenswrapper[4911]: I0929 21:58:08.738016 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grqwd\" (UniqueName: \"kubernetes.io/projected/d4e66ca0-22c9-44b1-88fc-35b469649d8a-kube-api-access-grqwd\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.272575 4911 generic.go:334] "Generic (PLEG): container finished" podID="d4e66ca0-22c9-44b1-88fc-35b469649d8a" containerID="e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5" exitCode=0 Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.272643 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvjqv" Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.272667 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvjqv" event={"ID":"d4e66ca0-22c9-44b1-88fc-35b469649d8a","Type":"ContainerDied","Data":"e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5"} Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.275052 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvjqv" event={"ID":"d4e66ca0-22c9-44b1-88fc-35b469649d8a","Type":"ContainerDied","Data":"27d990b85dddb926da11683ea732c9785c9c962193a63702b96369203dc12085"} Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.275088 4911 scope.go:117] "RemoveContainer" containerID="e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5" Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.309330 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvjqv"] Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.309934 4911 scope.go:117] "RemoveContainer" containerID="669453d3db543a92a19c15eb7fde1f13b3b2a7f8ef4815e3ad8344b114256d58" Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.317105 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvjqv"] Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.339130 4911 scope.go:117] "RemoveContainer" containerID="6fde27c55bdb9189918a480b00cbd38e5164f4f0f38097cfdcdbf95994f97976" Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.416651 4911 scope.go:117] "RemoveContainer" containerID="e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5" Sep 29 21:58:09 crc kubenswrapper[4911]: E0929 21:58:09.417180 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5\": container with ID starting with e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5 not found: ID does not exist" containerID="e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5" Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.417228 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5"} err="failed to get container status 
\"e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5\": rpc error: code = NotFound desc = could not find container \"e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5\": container with ID starting with e5b677878c5687f4745f8c168d1533c016fecb35b1d7ef2895e0d02ffdb319f5 not found: ID does not exist" Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.417261 4911 scope.go:117] "RemoveContainer" containerID="669453d3db543a92a19c15eb7fde1f13b3b2a7f8ef4815e3ad8344b114256d58" Sep 29 21:58:09 crc kubenswrapper[4911]: E0929 21:58:09.417706 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"669453d3db543a92a19c15eb7fde1f13b3b2a7f8ef4815e3ad8344b114256d58\": container with ID starting with 669453d3db543a92a19c15eb7fde1f13b3b2a7f8ef4815e3ad8344b114256d58 not found: ID does not exist" containerID="669453d3db543a92a19c15eb7fde1f13b3b2a7f8ef4815e3ad8344b114256d58" Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.417745 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"669453d3db543a92a19c15eb7fde1f13b3b2a7f8ef4815e3ad8344b114256d58"} err="failed to get container status \"669453d3db543a92a19c15eb7fde1f13b3b2a7f8ef4815e3ad8344b114256d58\": rpc error: code = NotFound desc = could not find container \"669453d3db543a92a19c15eb7fde1f13b3b2a7f8ef4815e3ad8344b114256d58\": container with ID starting with 669453d3db543a92a19c15eb7fde1f13b3b2a7f8ef4815e3ad8344b114256d58 not found: ID does not exist" Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.417769 4911 scope.go:117] "RemoveContainer" containerID="6fde27c55bdb9189918a480b00cbd38e5164f4f0f38097cfdcdbf95994f97976" Sep 29 21:58:09 crc kubenswrapper[4911]: E0929 21:58:09.418048 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fde27c55bdb9189918a480b00cbd38e5164f4f0f38097cfdcdbf95994f97976\": container with ID starting with 6fde27c55bdb9189918a480b00cbd38e5164f4f0f38097cfdcdbf95994f97976 not found: ID does not exist" containerID="6fde27c55bdb9189918a480b00cbd38e5164f4f0f38097cfdcdbf95994f97976" Sep 29 21:58:09 crc kubenswrapper[4911]: I0929 21:58:09.418082 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fde27c55bdb9189918a480b00cbd38e5164f4f0f38097cfdcdbf95994f97976"} err="failed to get container status \"6fde27c55bdb9189918a480b00cbd38e5164f4f0f38097cfdcdbf95994f97976\": rpc error: code = NotFound desc = could not find container \"6fde27c55bdb9189918a480b00cbd38e5164f4f0f38097cfdcdbf95994f97976\": container with ID starting with 6fde27c55bdb9189918a480b00cbd38e5164f4f0f38097cfdcdbf95994f97976 not found: ID does not exist" Sep 29 21:58:10 crc kubenswrapper[4911]: I0929 21:58:10.728265 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4e66ca0-22c9-44b1-88fc-35b469649d8a" path="/var/lib/kubelet/pods/d4e66ca0-22c9-44b1-88fc-35b469649d8a/volumes" Sep 29 21:58:10 crc kubenswrapper[4911]: I0929 21:58:10.783745 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:58:10 crc kubenswrapper[4911]: I0929 21:58:10.855090 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:58:12 crc kubenswrapper[4911]: I0929 21:58:12.923860 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-wmx9x"] Sep 29 21:58:12 crc kubenswrapper[4911]: I0929 21:58:12.925228 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wmx9x" podUID="714a8b68-402c-4f86-b995-ac8cc8849b21" containerName="registry-server" containerID="cri-o://98a733a39a5bc6900519aa3c69f18876e629ab4db595b958d1d692be4e7434d6" gracePeriod=2 Sep 29 21:58:13 crc kubenswrapper[4911]: I0929 21:58:13.325039 4911 generic.go:334] "Generic (PLEG): container finished" podID="714a8b68-402c-4f86-b995-ac8cc8849b21" containerID="98a733a39a5bc6900519aa3c69f18876e629ab4db595b958d1d692be4e7434d6" exitCode=0 Sep 29 21:58:13 crc kubenswrapper[4911]: I0929 21:58:13.325120 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wmx9x" event={"ID":"714a8b68-402c-4f86-b995-ac8cc8849b21","Type":"ContainerDied","Data":"98a733a39a5bc6900519aa3c69f18876e629ab4db595b958d1d692be4e7434d6"} Sep 29 21:58:13 crc kubenswrapper[4911]: I0929 21:58:13.465882 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:58:13 crc kubenswrapper[4911]: I0929 21:58:13.643578 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/714a8b68-402c-4f86-b995-ac8cc8849b21-catalog-content\") pod \"714a8b68-402c-4f86-b995-ac8cc8849b21\" (UID: \"714a8b68-402c-4f86-b995-ac8cc8849b21\") " Sep 29 21:58:13 crc kubenswrapper[4911]: I0929 21:58:13.643758 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnpql\" (UniqueName: \"kubernetes.io/projected/714a8b68-402c-4f86-b995-ac8cc8849b21-kube-api-access-gnpql\") pod \"714a8b68-402c-4f86-b995-ac8cc8849b21\" (UID: \"714a8b68-402c-4f86-b995-ac8cc8849b21\") " Sep 29 21:58:13 crc kubenswrapper[4911]: I0929 21:58:13.643981 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/714a8b68-402c-4f86-b995-ac8cc8849b21-utilities\") pod \"714a8b68-402c-4f86-b995-ac8cc8849b21\" (UID: \"714a8b68-402c-4f86-b995-ac8cc8849b21\") " Sep 29 21:58:13 crc kubenswrapper[4911]: I0929 21:58:13.645058 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/714a8b68-402c-4f86-b995-ac8cc8849b21-utilities" (OuterVolumeSpecName: "utilities") pod "714a8b68-402c-4f86-b995-ac8cc8849b21" (UID: "714a8b68-402c-4f86-b995-ac8cc8849b21"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:58:13 crc kubenswrapper[4911]: I0929 21:58:13.645657 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/714a8b68-402c-4f86-b995-ac8cc8849b21-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:13 crc kubenswrapper[4911]: I0929 21:58:13.657101 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/714a8b68-402c-4f86-b995-ac8cc8849b21-kube-api-access-gnpql" (OuterVolumeSpecName: "kube-api-access-gnpql") pod "714a8b68-402c-4f86-b995-ac8cc8849b21" (UID: "714a8b68-402c-4f86-b995-ac8cc8849b21"). InnerVolumeSpecName "kube-api-access-gnpql". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:58:13 crc kubenswrapper[4911]: I0929 21:58:13.749763 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnpql\" (UniqueName: \"kubernetes.io/projected/714a8b68-402c-4f86-b995-ac8cc8849b21-kube-api-access-gnpql\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:13 crc kubenswrapper[4911]: I0929 21:58:13.752744 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/714a8b68-402c-4f86-b995-ac8cc8849b21-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "714a8b68-402c-4f86-b995-ac8cc8849b21" (UID: "714a8b68-402c-4f86-b995-ac8cc8849b21"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 21:58:13 crc kubenswrapper[4911]: I0929 21:58:13.852139 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/714a8b68-402c-4f86-b995-ac8cc8849b21-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:14 crc kubenswrapper[4911]: I0929 21:58:14.341465 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wmx9x" event={"ID":"714a8b68-402c-4f86-b995-ac8cc8849b21","Type":"ContainerDied","Data":"dbace4d5a8b9729671407ae8f36eb584b100bd2a1afb5974c920e308db5fba7f"} Sep 29 21:58:14 crc kubenswrapper[4911]: I0929 21:58:14.341540 4911 scope.go:117] "RemoveContainer" containerID="98a733a39a5bc6900519aa3c69f18876e629ab4db595b958d1d692be4e7434d6" Sep 29 21:58:14 crc kubenswrapper[4911]: I0929 21:58:14.341561 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wmx9x" Sep 29 21:58:14 crc kubenswrapper[4911]: I0929 21:58:14.370756 4911 scope.go:117] "RemoveContainer" containerID="eec775fbc5cde4db188f46da8ce82845728fd47851699fc98858ab9353cbd110" Sep 29 21:58:14 crc kubenswrapper[4911]: I0929 21:58:14.413855 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wmx9x"] Sep 29 21:58:14 crc kubenswrapper[4911]: I0929 21:58:14.423960 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wmx9x"] Sep 29 21:58:14 crc kubenswrapper[4911]: I0929 21:58:14.424102 4911 scope.go:117] "RemoveContainer" containerID="93ce40c7250047f415751d9745c4fee9832f8579668e08727eba974bf6b8639f" Sep 29 21:58:14 crc kubenswrapper[4911]: I0929 21:58:14.712690 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="714a8b68-402c-4f86-b995-ac8cc8849b21" path="/var/lib/kubelet/pods/714a8b68-402c-4f86-b995-ac8cc8849b21/volumes" Sep 29 21:58:25 crc kubenswrapper[4911]: I0929 21:58:25.211606 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 21:58:25 crc kubenswrapper[4911]: I0929 21:58:25.212292 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 21:58:25 crc kubenswrapper[4911]: I0929 21:58:25.212358 4911 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 21:58:25 crc kubenswrapper[4911]: I0929 21:58:25.213478 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cf3902371ff18d62ab98dcaca1c35a41b574028c73c6cba4dcb8f735f395f50a"} pod="openshift-machine-config-operator/machine-config-daemon-w647f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 21:58:25 crc kubenswrapper[4911]: I0929 21:58:25.213577 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" containerID="cri-o://cf3902371ff18d62ab98dcaca1c35a41b574028c73c6cba4dcb8f735f395f50a" gracePeriod=600 Sep 29 21:58:25 crc kubenswrapper[4911]: I0929 21:58:25.486678 4911 generic.go:334] "Generic (PLEG): container finished" podID="50640abc-40db-4390-82d1-f3cfc76da71c" containerID="cf3902371ff18d62ab98dcaca1c35a41b574028c73c6cba4dcb8f735f395f50a" exitCode=0 Sep 29 21:58:25 crc kubenswrapper[4911]: I0929 21:58:25.486759 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerDied","Data":"cf3902371ff18d62ab98dcaca1c35a41b574028c73c6cba4dcb8f735f395f50a"} Sep 29 21:58:25 crc kubenswrapper[4911]: I0929 21:58:25.487373 4911 scope.go:117] "RemoveContainer" containerID="d13b753cc9aafc8d8aea3ea478f3d4351488778dd330e53fcc751897853d776a" Sep 29 21:58:26 crc kubenswrapper[4911]: I0929 21:58:26.499578 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50"} Sep 29 21:58:34 crc kubenswrapper[4911]: I0929 21:58:34.602697 4911 generic.go:334] "Generic (PLEG): container finished" podID="b40f1414-088a-40e3-a07c-041c6e461771" containerID="817d6f5d5c40b6e7cb0ca7cb71db173559083642bf1db6306b3c35180ed91dd4" exitCode=0 Sep 29 21:58:34 crc kubenswrapper[4911]: I0929 21:58:34.602784 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" event={"ID":"b40f1414-088a-40e3-a07c-041c6e461771","Type":"ContainerDied","Data":"817d6f5d5c40b6e7cb0ca7cb71db173559083642bf1db6306b3c35180ed91dd4"} Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.053780 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.223764 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-neutron-metadata-combined-ca-bundle\") pod \"b40f1414-088a-40e3-a07c-041c6e461771\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.223833 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-neutron-ovn-metadata-agent-neutron-config-0\") pod \"b40f1414-088a-40e3-a07c-041c6e461771\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.223954 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-inventory\") pod \"b40f1414-088a-40e3-a07c-041c6e461771\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.223972 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-ssh-key\") pod \"b40f1414-088a-40e3-a07c-041c6e461771\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.223995 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85gt4\" (UniqueName: \"kubernetes.io/projected/b40f1414-088a-40e3-a07c-041c6e461771-kube-api-access-85gt4\") pod \"b40f1414-088a-40e3-a07c-041c6e461771\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.224086 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-nova-metadata-neutron-config-0\") pod \"b40f1414-088a-40e3-a07c-041c6e461771\" (UID: \"b40f1414-088a-40e3-a07c-041c6e461771\") " Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.231013 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "b40f1414-088a-40e3-a07c-041c6e461771" (UID: "b40f1414-088a-40e3-a07c-041c6e461771"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.231830 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b40f1414-088a-40e3-a07c-041c6e461771-kube-api-access-85gt4" (OuterVolumeSpecName: "kube-api-access-85gt4") pod "b40f1414-088a-40e3-a07c-041c6e461771" (UID: "b40f1414-088a-40e3-a07c-041c6e461771"). InnerVolumeSpecName "kube-api-access-85gt4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.252947 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "b40f1414-088a-40e3-a07c-041c6e461771" (UID: "b40f1414-088a-40e3-a07c-041c6e461771"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.255349 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b40f1414-088a-40e3-a07c-041c6e461771" (UID: "b40f1414-088a-40e3-a07c-041c6e461771"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.258417 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "b40f1414-088a-40e3-a07c-041c6e461771" (UID: "b40f1414-088a-40e3-a07c-041c6e461771"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.263976 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-inventory" (OuterVolumeSpecName: "inventory") pod "b40f1414-088a-40e3-a07c-041c6e461771" (UID: "b40f1414-088a-40e3-a07c-041c6e461771"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.327144 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.327190 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.327210 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85gt4\" (UniqueName: \"kubernetes.io/projected/b40f1414-088a-40e3-a07c-041c6e461771-kube-api-access-85gt4\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.327232 4911 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.327253 4911 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.327275 4911 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b40f1414-088a-40e3-a07c-041c6e461771-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.630858 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" event={"ID":"b40f1414-088a-40e3-a07c-041c6e461771","Type":"ContainerDied","Data":"b53c0134ddc7d2249b9a96bc1417561331a48b3aba211ddf4442bf3953ac11d1"} Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.630920 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b53c0134ddc7d2249b9a96bc1417561331a48b3aba211ddf4442bf3953ac11d1" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.630956 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.764569 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4"] Sep 29 21:58:36 crc kubenswrapper[4911]: E0929 21:58:36.765265 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4e66ca0-22c9-44b1-88fc-35b469649d8a" containerName="extract-content" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.765296 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4e66ca0-22c9-44b1-88fc-35b469649d8a" containerName="extract-content" Sep 29 21:58:36 crc kubenswrapper[4911]: E0929 21:58:36.765324 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="082c9964-6925-4d67-b4fc-33f56c957dda" containerName="extract-content" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.765337 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="082c9964-6925-4d67-b4fc-33f56c957dda" containerName="extract-content" Sep 29 21:58:36 crc kubenswrapper[4911]: E0929 21:58:36.765366 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="714a8b68-402c-4f86-b995-ac8cc8849b21" containerName="extract-content" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.765378 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="714a8b68-402c-4f86-b995-ac8cc8849b21" containerName="extract-content" Sep 29 21:58:36 crc kubenswrapper[4911]: E0929 21:58:36.765400 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4e66ca0-22c9-44b1-88fc-35b469649d8a" containerName="extract-utilities" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.765413 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4e66ca0-22c9-44b1-88fc-35b469649d8a" containerName="extract-utilities" Sep 29 21:58:36 crc kubenswrapper[4911]: E0929 21:58:36.765432 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="714a8b68-402c-4f86-b995-ac8cc8849b21" containerName="extract-utilities" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.765443 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="714a8b68-402c-4f86-b995-ac8cc8849b21" containerName="extract-utilities" Sep 29 21:58:36 crc kubenswrapper[4911]: E0929 21:58:36.765464 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="082c9964-6925-4d67-b4fc-33f56c957dda" containerName="registry-server" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.765476 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="082c9964-6925-4d67-b4fc-33f56c957dda" containerName="registry-server" Sep 29 21:58:36 crc kubenswrapper[4911]: E0929 21:58:36.765497 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="082c9964-6925-4d67-b4fc-33f56c957dda" containerName="extract-utilities" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.765508 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="082c9964-6925-4d67-b4fc-33f56c957dda" containerName="extract-utilities" Sep 29 21:58:36 crc kubenswrapper[4911]: E0929 21:58:36.765531 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4e66ca0-22c9-44b1-88fc-35b469649d8a" containerName="registry-server" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.765544 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4e66ca0-22c9-44b1-88fc-35b469649d8a" containerName="registry-server" Sep 29 21:58:36 crc kubenswrapper[4911]: E0929 21:58:36.765580 4911 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b40f1414-088a-40e3-a07c-041c6e461771" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.765594 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b40f1414-088a-40e3-a07c-041c6e461771" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 21:58:36 crc kubenswrapper[4911]: E0929 21:58:36.765614 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="714a8b68-402c-4f86-b995-ac8cc8849b21" containerName="registry-server" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.765625 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="714a8b68-402c-4f86-b995-ac8cc8849b21" containerName="registry-server" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.765985 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b40f1414-088a-40e3-a07c-041c6e461771" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.766017 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4e66ca0-22c9-44b1-88fc-35b469649d8a" containerName="registry-server" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.766036 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="082c9964-6925-4d67-b4fc-33f56c957dda" containerName="registry-server" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.766065 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="714a8b68-402c-4f86-b995-ac8cc8849b21" containerName="registry-server" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.767050 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.769403 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.772274 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.772507 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.772647 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.774254 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.792746 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4"] Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.938765 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.939259 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-qfnqq\" (UniqueName: \"kubernetes.io/projected/840af368-3414-4fe0-915a-5629b81bbdf4-kube-api-access-qfnqq\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.939510 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.939695 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:36 crc kubenswrapper[4911]: I0929 21:58:36.939858 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:37 crc kubenswrapper[4911]: I0929 21:58:37.043313 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:37 crc kubenswrapper[4911]: I0929 21:58:37.043391 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:37 crc kubenswrapper[4911]: I0929 21:58:37.043428 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:37 crc kubenswrapper[4911]: I0929 21:58:37.043494 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:37 crc kubenswrapper[4911]: I0929 21:58:37.043612 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfnqq\" (UniqueName: 
\"kubernetes.io/projected/840af368-3414-4fe0-915a-5629b81bbdf4-kube-api-access-qfnqq\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:37 crc kubenswrapper[4911]: I0929 21:58:37.047783 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:37 crc kubenswrapper[4911]: I0929 21:58:37.048646 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:37 crc kubenswrapper[4911]: I0929 21:58:37.053120 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:37 crc kubenswrapper[4911]: I0929 21:58:37.054552 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:37 crc kubenswrapper[4911]: I0929 21:58:37.071427 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfnqq\" (UniqueName: \"kubernetes.io/projected/840af368-3414-4fe0-915a-5629b81bbdf4-kube-api-access-qfnqq\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:37 crc kubenswrapper[4911]: I0929 21:58:37.092027 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 21:58:37 crc kubenswrapper[4911]: I0929 21:58:37.655560 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4"] Sep 29 21:58:37 crc kubenswrapper[4911]: W0929 21:58:37.659207 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod840af368_3414_4fe0_915a_5629b81bbdf4.slice/crio-d3650dbaffe23fa4c56c00b22945c24c8d16cdf9234a04c43ba97d980e9e2062 WatchSource:0}: Error finding container d3650dbaffe23fa4c56c00b22945c24c8d16cdf9234a04c43ba97d980e9e2062: Status 404 returned error can't find the container with id d3650dbaffe23fa4c56c00b22945c24c8d16cdf9234a04c43ba97d980e9e2062 Sep 29 21:58:38 crc kubenswrapper[4911]: I0929 21:58:38.654709 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" event={"ID":"840af368-3414-4fe0-915a-5629b81bbdf4","Type":"ContainerStarted","Data":"7601af5193bccf2931c9fb691e8bb165cbf626b11974c1bc5e77f38120046b2a"} Sep 29 21:58:38 crc kubenswrapper[4911]: I0929 21:58:38.655298 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" event={"ID":"840af368-3414-4fe0-915a-5629b81bbdf4","Type":"ContainerStarted","Data":"d3650dbaffe23fa4c56c00b22945c24c8d16cdf9234a04c43ba97d980e9e2062"} Sep 29 21:58:38 crc kubenswrapper[4911]: I0929 21:58:38.708249 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" podStartSLOduration=2.312718415 podStartE2EDuration="2.708221234s" podCreationTimestamp="2025-09-29 21:58:36 +0000 UTC" firstStartedPulling="2025-09-29 21:58:37.661833684 +0000 UTC m=+1995.638946355" lastFinishedPulling="2025-09-29 21:58:38.057336503 +0000 UTC m=+1996.034449174" observedRunningTime="2025-09-29 21:58:38.688440446 +0000 UTC m=+1996.665553167" watchObservedRunningTime="2025-09-29 21:58:38.708221234 +0000 UTC m=+1996.685333945" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.149594 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw"] Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.151393 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.158177 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.159051 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.185230 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw"] Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.277883 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq7sg\" (UniqueName: \"kubernetes.io/projected/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-kube-api-access-wq7sg\") pod \"collect-profiles-29319720-twrbw\" (UID: \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.277989 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-secret-volume\") pod \"collect-profiles-29319720-twrbw\" (UID: \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.278286 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-config-volume\") pod \"collect-profiles-29319720-twrbw\" (UID: \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.379885 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq7sg\" (UniqueName: \"kubernetes.io/projected/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-kube-api-access-wq7sg\") pod \"collect-profiles-29319720-twrbw\" (UID: \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.380015 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-secret-volume\") pod \"collect-profiles-29319720-twrbw\" (UID: \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.380190 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-config-volume\") pod \"collect-profiles-29319720-twrbw\" (UID: \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.381204 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-config-volume\") pod 
\"collect-profiles-29319720-twrbw\" (UID: \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.387969 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-secret-volume\") pod \"collect-profiles-29319720-twrbw\" (UID: \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.395718 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq7sg\" (UniqueName: \"kubernetes.io/projected/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-kube-api-access-wq7sg\") pod \"collect-profiles-29319720-twrbw\" (UID: \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.516784 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:00 crc kubenswrapper[4911]: I0929 22:00:00.808084 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw"] Sep 29 22:00:01 crc kubenswrapper[4911]: I0929 22:00:01.562149 4911 generic.go:334] "Generic (PLEG): container finished" podID="6018bcf9-f212-4a70-bdd4-5aa54d1a10c5" containerID="ef088d83d1316b130d25b135d8334c0c0e039667e033d62722a538a960844375" exitCode=0 Sep 29 22:00:01 crc kubenswrapper[4911]: I0929 22:00:01.562225 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" event={"ID":"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5","Type":"ContainerDied","Data":"ef088d83d1316b130d25b135d8334c0c0e039667e033d62722a538a960844375"} Sep 29 22:00:01 crc kubenswrapper[4911]: I0929 22:00:01.562489 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" event={"ID":"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5","Type":"ContainerStarted","Data":"c4d2be1924b27ffdce9d0d90e6660ce2ee9ed705b312409a30c9aa7c96e0cd89"} Sep 29 22:00:02 crc kubenswrapper[4911]: I0929 22:00:02.942248 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:03 crc kubenswrapper[4911]: I0929 22:00:03.041768 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-config-volume\") pod \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\" (UID: \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\") " Sep 29 22:00:03 crc kubenswrapper[4911]: I0929 22:00:03.041962 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-secret-volume\") pod \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\" (UID: \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\") " Sep 29 22:00:03 crc kubenswrapper[4911]: I0929 22:00:03.042079 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wq7sg\" (UniqueName: \"kubernetes.io/projected/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-kube-api-access-wq7sg\") pod \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\" (UID: \"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5\") " Sep 29 22:00:03 crc kubenswrapper[4911]: I0929 22:00:03.042259 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-config-volume" (OuterVolumeSpecName: "config-volume") pod "6018bcf9-f212-4a70-bdd4-5aa54d1a10c5" (UID: "6018bcf9-f212-4a70-bdd4-5aa54d1a10c5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:00:03 crc kubenswrapper[4911]: I0929 22:00:03.042511 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 22:00:03 crc kubenswrapper[4911]: I0929 22:00:03.061361 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-kube-api-access-wq7sg" (OuterVolumeSpecName: "kube-api-access-wq7sg") pod "6018bcf9-f212-4a70-bdd4-5aa54d1a10c5" (UID: "6018bcf9-f212-4a70-bdd4-5aa54d1a10c5"). InnerVolumeSpecName "kube-api-access-wq7sg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:00:03 crc kubenswrapper[4911]: I0929 22:00:03.062351 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6018bcf9-f212-4a70-bdd4-5aa54d1a10c5" (UID: "6018bcf9-f212-4a70-bdd4-5aa54d1a10c5"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:00:03 crc kubenswrapper[4911]: I0929 22:00:03.145361 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 22:00:03 crc kubenswrapper[4911]: I0929 22:00:03.145403 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wq7sg\" (UniqueName: \"kubernetes.io/projected/6018bcf9-f212-4a70-bdd4-5aa54d1a10c5-kube-api-access-wq7sg\") on node \"crc\" DevicePath \"\"" Sep 29 22:00:03 crc kubenswrapper[4911]: I0929 22:00:03.580083 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" event={"ID":"6018bcf9-f212-4a70-bdd4-5aa54d1a10c5","Type":"ContainerDied","Data":"c4d2be1924b27ffdce9d0d90e6660ce2ee9ed705b312409a30c9aa7c96e0cd89"} Sep 29 22:00:03 crc kubenswrapper[4911]: I0929 22:00:03.580126 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319720-twrbw" Sep 29 22:00:03 crc kubenswrapper[4911]: I0929 22:00:03.580132 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4d2be1924b27ffdce9d0d90e6660ce2ee9ed705b312409a30c9aa7c96e0cd89" Sep 29 22:00:04 crc kubenswrapper[4911]: I0929 22:00:04.044674 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss"] Sep 29 22:00:04 crc kubenswrapper[4911]: I0929 22:00:04.053650 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319675-qdtss"] Sep 29 22:00:04 crc kubenswrapper[4911]: I0929 22:00:04.717319 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ad88e09-2e13-4070-9c1f-75cb9dd12ebf" path="/var/lib/kubelet/pods/2ad88e09-2e13-4070-9c1f-75cb9dd12ebf/volumes" Sep 29 22:00:25 crc kubenswrapper[4911]: I0929 22:00:25.211365 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:00:25 crc kubenswrapper[4911]: I0929 22:00:25.212172 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:00:38 crc kubenswrapper[4911]: I0929 22:00:38.229960 4911 scope.go:117] "RemoveContainer" containerID="4338ac5cad4c96f3f2896e8f12a7c11398b9594037fa7d204caf3949624f5c8e" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.641920 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hbqwz"] Sep 29 22:00:39 crc kubenswrapper[4911]: E0929 22:00:39.642634 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6018bcf9-f212-4a70-bdd4-5aa54d1a10c5" containerName="collect-profiles" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.642651 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6018bcf9-f212-4a70-bdd4-5aa54d1a10c5" containerName="collect-profiles" Sep 29 22:00:39 crc kubenswrapper[4911]: 
I0929 22:00:39.642941 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6018bcf9-f212-4a70-bdd4-5aa54d1a10c5" containerName="collect-profiles" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.644455 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.675942 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hbqwz"] Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.812808 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596b63ad-af1a-4da4-ac03-296dacac7a07-utilities\") pod \"certified-operators-hbqwz\" (UID: \"596b63ad-af1a-4da4-ac03-296dacac7a07\") " pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.813025 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596b63ad-af1a-4da4-ac03-296dacac7a07-catalog-content\") pod \"certified-operators-hbqwz\" (UID: \"596b63ad-af1a-4da4-ac03-296dacac7a07\") " pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.813192 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zkzz\" (UniqueName: \"kubernetes.io/projected/596b63ad-af1a-4da4-ac03-296dacac7a07-kube-api-access-4zkzz\") pod \"certified-operators-hbqwz\" (UID: \"596b63ad-af1a-4da4-ac03-296dacac7a07\") " pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.915782 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596b63ad-af1a-4da4-ac03-296dacac7a07-utilities\") pod \"certified-operators-hbqwz\" (UID: \"596b63ad-af1a-4da4-ac03-296dacac7a07\") " pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.915853 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596b63ad-af1a-4da4-ac03-296dacac7a07-catalog-content\") pod \"certified-operators-hbqwz\" (UID: \"596b63ad-af1a-4da4-ac03-296dacac7a07\") " pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.915921 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zkzz\" (UniqueName: \"kubernetes.io/projected/596b63ad-af1a-4da4-ac03-296dacac7a07-kube-api-access-4zkzz\") pod \"certified-operators-hbqwz\" (UID: \"596b63ad-af1a-4da4-ac03-296dacac7a07\") " pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.916232 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596b63ad-af1a-4da4-ac03-296dacac7a07-utilities\") pod \"certified-operators-hbqwz\" (UID: \"596b63ad-af1a-4da4-ac03-296dacac7a07\") " pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.916450 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/596b63ad-af1a-4da4-ac03-296dacac7a07-catalog-content\") pod \"certified-operators-hbqwz\" (UID: \"596b63ad-af1a-4da4-ac03-296dacac7a07\") " pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.936120 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zkzz\" (UniqueName: \"kubernetes.io/projected/596b63ad-af1a-4da4-ac03-296dacac7a07-kube-api-access-4zkzz\") pod \"certified-operators-hbqwz\" (UID: \"596b63ad-af1a-4da4-ac03-296dacac7a07\") " pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:39 crc kubenswrapper[4911]: I0929 22:00:39.978885 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:40 crc kubenswrapper[4911]: I0929 22:00:40.511949 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hbqwz"] Sep 29 22:00:40 crc kubenswrapper[4911]: W0929 22:00:40.514297 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod596b63ad_af1a_4da4_ac03_296dacac7a07.slice/crio-fcd04c998c8a17656adda00d1ac2b2ac65d854f525020a1b67644ae045a68bc8 WatchSource:0}: Error finding container fcd04c998c8a17656adda00d1ac2b2ac65d854f525020a1b67644ae045a68bc8: Status 404 returned error can't find the container with id fcd04c998c8a17656adda00d1ac2b2ac65d854f525020a1b67644ae045a68bc8 Sep 29 22:00:40 crc kubenswrapper[4911]: I0929 22:00:40.960882 4911 generic.go:334] "Generic (PLEG): container finished" podID="596b63ad-af1a-4da4-ac03-296dacac7a07" containerID="5e88b0c478e2ca9a2bc5af34881745bb9d2e7dfda54d6225fa07a5e446bce645" exitCode=0 Sep 29 22:00:40 crc kubenswrapper[4911]: I0929 22:00:40.960930 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbqwz" event={"ID":"596b63ad-af1a-4da4-ac03-296dacac7a07","Type":"ContainerDied","Data":"5e88b0c478e2ca9a2bc5af34881745bb9d2e7dfda54d6225fa07a5e446bce645"} Sep 29 22:00:40 crc kubenswrapper[4911]: I0929 22:00:40.960958 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbqwz" event={"ID":"596b63ad-af1a-4da4-ac03-296dacac7a07","Type":"ContainerStarted","Data":"fcd04c998c8a17656adda00d1ac2b2ac65d854f525020a1b67644ae045a68bc8"} Sep 29 22:00:40 crc kubenswrapper[4911]: I0929 22:00:40.963726 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 22:00:41 crc kubenswrapper[4911]: I0929 22:00:41.972842 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbqwz" event={"ID":"596b63ad-af1a-4da4-ac03-296dacac7a07","Type":"ContainerStarted","Data":"0662706edbfb68f349a0078080f4e9c62910d26bb3c8c2f14daf76cbc10db74b"} Sep 29 22:00:42 crc kubenswrapper[4911]: I0929 22:00:42.988942 4911 generic.go:334] "Generic (PLEG): container finished" podID="596b63ad-af1a-4da4-ac03-296dacac7a07" containerID="0662706edbfb68f349a0078080f4e9c62910d26bb3c8c2f14daf76cbc10db74b" exitCode=0 Sep 29 22:00:42 crc kubenswrapper[4911]: I0929 22:00:42.989044 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbqwz" event={"ID":"596b63ad-af1a-4da4-ac03-296dacac7a07","Type":"ContainerDied","Data":"0662706edbfb68f349a0078080f4e9c62910d26bb3c8c2f14daf76cbc10db74b"} Sep 29 22:00:44 crc kubenswrapper[4911]: I0929 22:00:44.007899 
4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbqwz" event={"ID":"596b63ad-af1a-4da4-ac03-296dacac7a07","Type":"ContainerStarted","Data":"6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c"} Sep 29 22:00:44 crc kubenswrapper[4911]: I0929 22:00:44.029967 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hbqwz" podStartSLOduration=2.571170319 podStartE2EDuration="5.029945826s" podCreationTimestamp="2025-09-29 22:00:39 +0000 UTC" firstStartedPulling="2025-09-29 22:00:40.963328644 +0000 UTC m=+2118.940441345" lastFinishedPulling="2025-09-29 22:00:43.422104141 +0000 UTC m=+2121.399216852" observedRunningTime="2025-09-29 22:00:44.026949332 +0000 UTC m=+2122.004062053" watchObservedRunningTime="2025-09-29 22:00:44.029945826 +0000 UTC m=+2122.007058517" Sep 29 22:00:49 crc kubenswrapper[4911]: I0929 22:00:49.980425 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:49 crc kubenswrapper[4911]: I0929 22:00:49.980978 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:50 crc kubenswrapper[4911]: I0929 22:00:50.030400 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:50 crc kubenswrapper[4911]: I0929 22:00:50.124646 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:50 crc kubenswrapper[4911]: I0929 22:00:50.284101 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hbqwz"] Sep 29 22:00:52 crc kubenswrapper[4911]: I0929 22:00:52.086456 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hbqwz" podUID="596b63ad-af1a-4da4-ac03-296dacac7a07" containerName="registry-server" containerID="cri-o://6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c" gracePeriod=2 Sep 29 22:00:52 crc kubenswrapper[4911]: I0929 22:00:52.604103 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:52 crc kubenswrapper[4911]: I0929 22:00:52.778491 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596b63ad-af1a-4da4-ac03-296dacac7a07-utilities\") pod \"596b63ad-af1a-4da4-ac03-296dacac7a07\" (UID: \"596b63ad-af1a-4da4-ac03-296dacac7a07\") " Sep 29 22:00:52 crc kubenswrapper[4911]: I0929 22:00:52.778602 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zkzz\" (UniqueName: \"kubernetes.io/projected/596b63ad-af1a-4da4-ac03-296dacac7a07-kube-api-access-4zkzz\") pod \"596b63ad-af1a-4da4-ac03-296dacac7a07\" (UID: \"596b63ad-af1a-4da4-ac03-296dacac7a07\") " Sep 29 22:00:52 crc kubenswrapper[4911]: I0929 22:00:52.778700 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596b63ad-af1a-4da4-ac03-296dacac7a07-catalog-content\") pod \"596b63ad-af1a-4da4-ac03-296dacac7a07\" (UID: \"596b63ad-af1a-4da4-ac03-296dacac7a07\") " Sep 29 22:00:52 crc kubenswrapper[4911]: I0929 22:00:52.779487 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/596b63ad-af1a-4da4-ac03-296dacac7a07-utilities" (OuterVolumeSpecName: "utilities") pod "596b63ad-af1a-4da4-ac03-296dacac7a07" (UID: "596b63ad-af1a-4da4-ac03-296dacac7a07"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:00:52 crc kubenswrapper[4911]: I0929 22:00:52.786068 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/596b63ad-af1a-4da4-ac03-296dacac7a07-kube-api-access-4zkzz" (OuterVolumeSpecName: "kube-api-access-4zkzz") pod "596b63ad-af1a-4da4-ac03-296dacac7a07" (UID: "596b63ad-af1a-4da4-ac03-296dacac7a07"). InnerVolumeSpecName "kube-api-access-4zkzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:00:52 crc kubenswrapper[4911]: I0929 22:00:52.819102 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/596b63ad-af1a-4da4-ac03-296dacac7a07-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "596b63ad-af1a-4da4-ac03-296dacac7a07" (UID: "596b63ad-af1a-4da4-ac03-296dacac7a07"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:00:52 crc kubenswrapper[4911]: I0929 22:00:52.881195 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596b63ad-af1a-4da4-ac03-296dacac7a07-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:00:52 crc kubenswrapper[4911]: I0929 22:00:52.881225 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zkzz\" (UniqueName: \"kubernetes.io/projected/596b63ad-af1a-4da4-ac03-296dacac7a07-kube-api-access-4zkzz\") on node \"crc\" DevicePath \"\"" Sep 29 22:00:52 crc kubenswrapper[4911]: I0929 22:00:52.881235 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596b63ad-af1a-4da4-ac03-296dacac7a07-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.106643 4911 generic.go:334] "Generic (PLEG): container finished" podID="596b63ad-af1a-4da4-ac03-296dacac7a07" containerID="6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c" exitCode=0 Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.106707 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbqwz" event={"ID":"596b63ad-af1a-4da4-ac03-296dacac7a07","Type":"ContainerDied","Data":"6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c"} Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.106743 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hbqwz" Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.106815 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbqwz" event={"ID":"596b63ad-af1a-4da4-ac03-296dacac7a07","Type":"ContainerDied","Data":"fcd04c998c8a17656adda00d1ac2b2ac65d854f525020a1b67644ae045a68bc8"} Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.106850 4911 scope.go:117] "RemoveContainer" containerID="6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c" Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.161368 4911 scope.go:117] "RemoveContainer" containerID="0662706edbfb68f349a0078080f4e9c62910d26bb3c8c2f14daf76cbc10db74b" Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.183030 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hbqwz"] Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.193673 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hbqwz"] Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.222257 4911 scope.go:117] "RemoveContainer" containerID="5e88b0c478e2ca9a2bc5af34881745bb9d2e7dfda54d6225fa07a5e446bce645" Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.240582 4911 scope.go:117] "RemoveContainer" containerID="6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c" Sep 29 22:00:53 crc kubenswrapper[4911]: E0929 22:00:53.241162 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c\": container with ID starting with 6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c not found: ID does not exist" containerID="6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c" Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.241251 
4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c"} err="failed to get container status \"6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c\": rpc error: code = NotFound desc = could not find container \"6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c\": container with ID starting with 6acd819a9c4a05ec300cb06e68b8d8584efefa9005cd3ee053e166548ea0b50c not found: ID does not exist" Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.241320 4911 scope.go:117] "RemoveContainer" containerID="0662706edbfb68f349a0078080f4e9c62910d26bb3c8c2f14daf76cbc10db74b" Sep 29 22:00:53 crc kubenswrapper[4911]: E0929 22:00:53.241669 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0662706edbfb68f349a0078080f4e9c62910d26bb3c8c2f14daf76cbc10db74b\": container with ID starting with 0662706edbfb68f349a0078080f4e9c62910d26bb3c8c2f14daf76cbc10db74b not found: ID does not exist" containerID="0662706edbfb68f349a0078080f4e9c62910d26bb3c8c2f14daf76cbc10db74b" Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.241696 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0662706edbfb68f349a0078080f4e9c62910d26bb3c8c2f14daf76cbc10db74b"} err="failed to get container status \"0662706edbfb68f349a0078080f4e9c62910d26bb3c8c2f14daf76cbc10db74b\": rpc error: code = NotFound desc = could not find container \"0662706edbfb68f349a0078080f4e9c62910d26bb3c8c2f14daf76cbc10db74b\": container with ID starting with 0662706edbfb68f349a0078080f4e9c62910d26bb3c8c2f14daf76cbc10db74b not found: ID does not exist" Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.241721 4911 scope.go:117] "RemoveContainer" containerID="5e88b0c478e2ca9a2bc5af34881745bb9d2e7dfda54d6225fa07a5e446bce645" Sep 29 22:00:53 crc kubenswrapper[4911]: E0929 22:00:53.242133 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e88b0c478e2ca9a2bc5af34881745bb9d2e7dfda54d6225fa07a5e446bce645\": container with ID starting with 5e88b0c478e2ca9a2bc5af34881745bb9d2e7dfda54d6225fa07a5e446bce645 not found: ID does not exist" containerID="5e88b0c478e2ca9a2bc5af34881745bb9d2e7dfda54d6225fa07a5e446bce645" Sep 29 22:00:53 crc kubenswrapper[4911]: I0929 22:00:53.242185 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e88b0c478e2ca9a2bc5af34881745bb9d2e7dfda54d6225fa07a5e446bce645"} err="failed to get container status \"5e88b0c478e2ca9a2bc5af34881745bb9d2e7dfda54d6225fa07a5e446bce645\": rpc error: code = NotFound desc = could not find container \"5e88b0c478e2ca9a2bc5af34881745bb9d2e7dfda54d6225fa07a5e446bce645\": container with ID starting with 5e88b0c478e2ca9a2bc5af34881745bb9d2e7dfda54d6225fa07a5e446bce645 not found: ID does not exist" Sep 29 22:00:54 crc kubenswrapper[4911]: I0929 22:00:54.715665 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="596b63ad-af1a-4da4-ac03-296dacac7a07" path="/var/lib/kubelet/pods/596b63ad-af1a-4da4-ac03-296dacac7a07/volumes" Sep 29 22:00:55 crc kubenswrapper[4911]: I0929 22:00:55.211604 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:00:55 crc kubenswrapper[4911]: I0929 22:00:55.211691 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.170403 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319721-rjzjd"] Sep 29 22:01:00 crc kubenswrapper[4911]: E0929 22:01:00.171362 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596b63ad-af1a-4da4-ac03-296dacac7a07" containerName="registry-server" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.171382 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="596b63ad-af1a-4da4-ac03-296dacac7a07" containerName="registry-server" Sep 29 22:01:00 crc kubenswrapper[4911]: E0929 22:01:00.171408 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596b63ad-af1a-4da4-ac03-296dacac7a07" containerName="extract-content" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.171416 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="596b63ad-af1a-4da4-ac03-296dacac7a07" containerName="extract-content" Sep 29 22:01:00 crc kubenswrapper[4911]: E0929 22:01:00.171440 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596b63ad-af1a-4da4-ac03-296dacac7a07" containerName="extract-utilities" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.171449 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="596b63ad-af1a-4da4-ac03-296dacac7a07" containerName="extract-utilities" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.171666 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="596b63ad-af1a-4da4-ac03-296dacac7a07" containerName="registry-server" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.172449 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.200200 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319721-rjzjd"] Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.344897 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-fernet-keys\") pod \"keystone-cron-29319721-rjzjd\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.345222 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-config-data\") pod \"keystone-cron-29319721-rjzjd\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.345726 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw5nd\" (UniqueName: \"kubernetes.io/projected/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-kube-api-access-cw5nd\") pod \"keystone-cron-29319721-rjzjd\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.345889 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-combined-ca-bundle\") pod \"keystone-cron-29319721-rjzjd\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.448295 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw5nd\" (UniqueName: \"kubernetes.io/projected/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-kube-api-access-cw5nd\") pod \"keystone-cron-29319721-rjzjd\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.448403 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-combined-ca-bundle\") pod \"keystone-cron-29319721-rjzjd\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.448535 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-fernet-keys\") pod \"keystone-cron-29319721-rjzjd\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.448635 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-config-data\") pod \"keystone-cron-29319721-rjzjd\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.455636 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-config-data\") pod \"keystone-cron-29319721-rjzjd\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.455860 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-fernet-keys\") pod \"keystone-cron-29319721-rjzjd\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.459258 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-combined-ca-bundle\") pod \"keystone-cron-29319721-rjzjd\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.477465 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw5nd\" (UniqueName: \"kubernetes.io/projected/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-kube-api-access-cw5nd\") pod \"keystone-cron-29319721-rjzjd\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.500885 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:00 crc kubenswrapper[4911]: I0929 22:01:00.783354 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319721-rjzjd"] Sep 29 22:01:01 crc kubenswrapper[4911]: I0929 22:01:01.205520 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319721-rjzjd" event={"ID":"bbf8467c-b7de-4104-b9a7-59e1d163bfb7","Type":"ContainerStarted","Data":"eba92c1ee54baed1f8cc08746efa7d4f2a1571f184f257331e0beb2190aa0767"} Sep 29 22:01:01 crc kubenswrapper[4911]: I0929 22:01:01.205582 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319721-rjzjd" event={"ID":"bbf8467c-b7de-4104-b9a7-59e1d163bfb7","Type":"ContainerStarted","Data":"b43f93e7ad7956635b0c350cc4fa1d5bb808a07b85ef6dcf111c8cc4dba59e8e"} Sep 29 22:01:01 crc kubenswrapper[4911]: I0929 22:01:01.229524 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319721-rjzjd" podStartSLOduration=1.229506955 podStartE2EDuration="1.229506955s" podCreationTimestamp="2025-09-29 22:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:01:01.221545015 +0000 UTC m=+2139.198657686" watchObservedRunningTime="2025-09-29 22:01:01.229506955 +0000 UTC m=+2139.206619626" Sep 29 22:01:03 crc kubenswrapper[4911]: I0929 22:01:03.228016 4911 generic.go:334] "Generic (PLEG): container finished" podID="bbf8467c-b7de-4104-b9a7-59e1d163bfb7" containerID="eba92c1ee54baed1f8cc08746efa7d4f2a1571f184f257331e0beb2190aa0767" exitCode=0 Sep 29 22:01:03 crc kubenswrapper[4911]: I0929 22:01:03.228118 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319721-rjzjd" event={"ID":"bbf8467c-b7de-4104-b9a7-59e1d163bfb7","Type":"ContainerDied","Data":"eba92c1ee54baed1f8cc08746efa7d4f2a1571f184f257331e0beb2190aa0767"} Sep 29 22:01:04 crc kubenswrapper[4911]: 
I0929 22:01:04.624214 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:04 crc kubenswrapper[4911]: I0929 22:01:04.735840 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cw5nd\" (UniqueName: \"kubernetes.io/projected/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-kube-api-access-cw5nd\") pod \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " Sep 29 22:01:04 crc kubenswrapper[4911]: I0929 22:01:04.735904 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-config-data\") pod \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " Sep 29 22:01:04 crc kubenswrapper[4911]: I0929 22:01:04.736113 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-fernet-keys\") pod \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " Sep 29 22:01:04 crc kubenswrapper[4911]: I0929 22:01:04.736786 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-combined-ca-bundle\") pod \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\" (UID: \"bbf8467c-b7de-4104-b9a7-59e1d163bfb7\") " Sep 29 22:01:04 crc kubenswrapper[4911]: I0929 22:01:04.741242 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "bbf8467c-b7de-4104-b9a7-59e1d163bfb7" (UID: "bbf8467c-b7de-4104-b9a7-59e1d163bfb7"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:01:04 crc kubenswrapper[4911]: I0929 22:01:04.741513 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-kube-api-access-cw5nd" (OuterVolumeSpecName: "kube-api-access-cw5nd") pod "bbf8467c-b7de-4104-b9a7-59e1d163bfb7" (UID: "bbf8467c-b7de-4104-b9a7-59e1d163bfb7"). InnerVolumeSpecName "kube-api-access-cw5nd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:01:04 crc kubenswrapper[4911]: I0929 22:01:04.781237 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bbf8467c-b7de-4104-b9a7-59e1d163bfb7" (UID: "bbf8467c-b7de-4104-b9a7-59e1d163bfb7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:01:04 crc kubenswrapper[4911]: I0929 22:01:04.819233 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-config-data" (OuterVolumeSpecName: "config-data") pod "bbf8467c-b7de-4104-b9a7-59e1d163bfb7" (UID: "bbf8467c-b7de-4104-b9a7-59e1d163bfb7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:01:04 crc kubenswrapper[4911]: I0929 22:01:04.839647 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:01:04 crc kubenswrapper[4911]: I0929 22:01:04.839675 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cw5nd\" (UniqueName: \"kubernetes.io/projected/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-kube-api-access-cw5nd\") on node \"crc\" DevicePath \"\"" Sep 29 22:01:04 crc kubenswrapper[4911]: I0929 22:01:04.839685 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:01:04 crc kubenswrapper[4911]: I0929 22:01:04.839693 4911 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bbf8467c-b7de-4104-b9a7-59e1d163bfb7-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 22:01:05 crc kubenswrapper[4911]: I0929 22:01:05.254065 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319721-rjzjd" event={"ID":"bbf8467c-b7de-4104-b9a7-59e1d163bfb7","Type":"ContainerDied","Data":"b43f93e7ad7956635b0c350cc4fa1d5bb808a07b85ef6dcf111c8cc4dba59e8e"} Sep 29 22:01:05 crc kubenswrapper[4911]: I0929 22:01:05.254110 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b43f93e7ad7956635b0c350cc4fa1d5bb808a07b85ef6dcf111c8cc4dba59e8e" Sep 29 22:01:05 crc kubenswrapper[4911]: I0929 22:01:05.254136 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319721-rjzjd" Sep 29 22:01:25 crc kubenswrapper[4911]: I0929 22:01:25.211323 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:01:25 crc kubenswrapper[4911]: I0929 22:01:25.212045 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:01:25 crc kubenswrapper[4911]: I0929 22:01:25.212134 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 22:01:25 crc kubenswrapper[4911]: I0929 22:01:25.213293 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50"} pod="openshift-machine-config-operator/machine-config-daemon-w647f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 22:01:25 crc kubenswrapper[4911]: I0929 22:01:25.213393 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" 
containerID="cri-o://36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" gracePeriod=600 Sep 29 22:01:25 crc kubenswrapper[4911]: E0929 22:01:25.352280 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:01:25 crc kubenswrapper[4911]: I0929 22:01:25.502301 4911 generic.go:334] "Generic (PLEG): container finished" podID="50640abc-40db-4390-82d1-f3cfc76da71c" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" exitCode=0 Sep 29 22:01:25 crc kubenswrapper[4911]: I0929 22:01:25.502363 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerDied","Data":"36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50"} Sep 29 22:01:25 crc kubenswrapper[4911]: I0929 22:01:25.502408 4911 scope.go:117] "RemoveContainer" containerID="cf3902371ff18d62ab98dcaca1c35a41b574028c73c6cba4dcb8f735f395f50a" Sep 29 22:01:25 crc kubenswrapper[4911]: I0929 22:01:25.503241 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:01:25 crc kubenswrapper[4911]: E0929 22:01:25.504165 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:01:37 crc kubenswrapper[4911]: I0929 22:01:37.701449 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:01:37 crc kubenswrapper[4911]: E0929 22:01:37.702487 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:01:51 crc kubenswrapper[4911]: I0929 22:01:51.701601 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:01:51 crc kubenswrapper[4911]: E0929 22:01:51.702443 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:02:03 crc kubenswrapper[4911]: I0929 22:02:03.701557 4911 scope.go:117] "RemoveContainer" 
containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:02:03 crc kubenswrapper[4911]: E0929 22:02:03.702605 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:02:18 crc kubenswrapper[4911]: I0929 22:02:18.701693 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:02:18 crc kubenswrapper[4911]: E0929 22:02:18.702358 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:02:32 crc kubenswrapper[4911]: I0929 22:02:32.711898 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:02:32 crc kubenswrapper[4911]: E0929 22:02:32.713000 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:02:45 crc kubenswrapper[4911]: I0929 22:02:45.700642 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:02:45 crc kubenswrapper[4911]: E0929 22:02:45.701415 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:02:57 crc kubenswrapper[4911]: I0929 22:02:57.701312 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:02:57 crc kubenswrapper[4911]: E0929 22:02:57.702439 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:03:08 crc kubenswrapper[4911]: I0929 22:03:08.701077 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:03:08 crc kubenswrapper[4911]: E0929 22:03:08.702214 4911 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:03:11 crc kubenswrapper[4911]: I0929 22:03:11.704385 4911 generic.go:334] "Generic (PLEG): container finished" podID="840af368-3414-4fe0-915a-5629b81bbdf4" containerID="7601af5193bccf2931c9fb691e8bb165cbf626b11974c1bc5e77f38120046b2a" exitCode=0 Sep 29 22:03:11 crc kubenswrapper[4911]: I0929 22:03:11.704454 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" event={"ID":"840af368-3414-4fe0-915a-5629b81bbdf4","Type":"ContainerDied","Data":"7601af5193bccf2931c9fb691e8bb165cbf626b11974c1bc5e77f38120046b2a"} Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.206774 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.264543 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-libvirt-secret-0\") pod \"840af368-3414-4fe0-915a-5629b81bbdf4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.264622 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfnqq\" (UniqueName: \"kubernetes.io/projected/840af368-3414-4fe0-915a-5629b81bbdf4-kube-api-access-qfnqq\") pod \"840af368-3414-4fe0-915a-5629b81bbdf4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.264776 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-inventory\") pod \"840af368-3414-4fe0-915a-5629b81bbdf4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.264817 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-libvirt-combined-ca-bundle\") pod \"840af368-3414-4fe0-915a-5629b81bbdf4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.266090 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-ssh-key\") pod \"840af368-3414-4fe0-915a-5629b81bbdf4\" (UID: \"840af368-3414-4fe0-915a-5629b81bbdf4\") " Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.271082 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/840af368-3414-4fe0-915a-5629b81bbdf4-kube-api-access-qfnqq" (OuterVolumeSpecName: "kube-api-access-qfnqq") pod "840af368-3414-4fe0-915a-5629b81bbdf4" (UID: "840af368-3414-4fe0-915a-5629b81bbdf4"). InnerVolumeSpecName "kube-api-access-qfnqq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.272078 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "840af368-3414-4fe0-915a-5629b81bbdf4" (UID: "840af368-3414-4fe0-915a-5629b81bbdf4"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.295773 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "840af368-3414-4fe0-915a-5629b81bbdf4" (UID: "840af368-3414-4fe0-915a-5629b81bbdf4"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.302684 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "840af368-3414-4fe0-915a-5629b81bbdf4" (UID: "840af368-3414-4fe0-915a-5629b81bbdf4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.319715 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-inventory" (OuterVolumeSpecName: "inventory") pod "840af368-3414-4fe0-915a-5629b81bbdf4" (UID: "840af368-3414-4fe0-915a-5629b81bbdf4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.369859 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.370196 4911 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.370316 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfnqq\" (UniqueName: \"kubernetes.io/projected/840af368-3414-4fe0-915a-5629b81bbdf4-kube-api-access-qfnqq\") on node \"crc\" DevicePath \"\"" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.370421 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.370504 4911 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/840af368-3414-4fe0-915a-5629b81bbdf4-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.732109 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" event={"ID":"840af368-3414-4fe0-915a-5629b81bbdf4","Type":"ContainerDied","Data":"d3650dbaffe23fa4c56c00b22945c24c8d16cdf9234a04c43ba97d980e9e2062"} Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.732175 4911 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3650dbaffe23fa4c56c00b22945c24c8d16cdf9234a04c43ba97d980e9e2062" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.732515 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.849751 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5"] Sep 29 22:03:13 crc kubenswrapper[4911]: E0929 22:03:13.850441 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="840af368-3414-4fe0-915a-5629b81bbdf4" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.850570 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="840af368-3414-4fe0-915a-5629b81bbdf4" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 22:03:13 crc kubenswrapper[4911]: E0929 22:03:13.850712 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbf8467c-b7de-4104-b9a7-59e1d163bfb7" containerName="keystone-cron" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.850829 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbf8467c-b7de-4104-b9a7-59e1d163bfb7" containerName="keystone-cron" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.851133 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="840af368-3414-4fe0-915a-5629b81bbdf4" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.851217 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbf8467c-b7de-4104-b9a7-59e1d163bfb7" containerName="keystone-cron" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.852036 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.856205 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.856637 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.857185 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.857380 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.857564 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.857769 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.858205 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.860742 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5"] Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.885873 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.885940 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.885971 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.886130 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cj55\" (UniqueName: \"kubernetes.io/projected/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-kube-api-access-4cj55\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.886248 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: 
\"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.886296 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.886524 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.886655 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.886745 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.988326 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.988444 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.988523 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.988622 4911 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.988673 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.988704 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.988745 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cj55\" (UniqueName: \"kubernetes.io/projected/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-kube-api-access-4cj55\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.988830 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.988866 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.991338 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.994606 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.995524 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.995882 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.996486 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:13 crc kubenswrapper[4911]: I0929 22:03:13.996609 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:14 crc kubenswrapper[4911]: I0929 22:03:14.000382 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:14 crc kubenswrapper[4911]: I0929 22:03:14.003498 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:14 crc kubenswrapper[4911]: I0929 22:03:14.005915 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cj55\" (UniqueName: \"kubernetes.io/projected/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-kube-api-access-4cj55\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dzlt5\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:14 crc kubenswrapper[4911]: I0929 22:03:14.178678 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:03:14 crc kubenswrapper[4911]: I0929 22:03:14.739841 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5"] Sep 29 22:03:14 crc kubenswrapper[4911]: W0929 22:03:14.750122 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee01ae86_832b_41fb_8a4a_53a73ffdb3aa.slice/crio-0e35fc868634fb7675e55e13780a54c0464c49236659481d8f34e3617a65c0e1 WatchSource:0}: Error finding container 0e35fc868634fb7675e55e13780a54c0464c49236659481d8f34e3617a65c0e1: Status 404 returned error can't find the container with id 0e35fc868634fb7675e55e13780a54c0464c49236659481d8f34e3617a65c0e1 Sep 29 22:03:15 crc kubenswrapper[4911]: I0929 22:03:15.754994 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" event={"ID":"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa","Type":"ContainerStarted","Data":"bca76067bd026005e3eaf907f0a32ea3a18a6b186ebfd141dedc4f656ce3e890"} Sep 29 22:03:15 crc kubenswrapper[4911]: I0929 22:03:15.755885 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" event={"ID":"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa","Type":"ContainerStarted","Data":"0e35fc868634fb7675e55e13780a54c0464c49236659481d8f34e3617a65c0e1"} Sep 29 22:03:15 crc kubenswrapper[4911]: I0929 22:03:15.784728 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" podStartSLOduration=2.102817645 podStartE2EDuration="2.784704747s" podCreationTimestamp="2025-09-29 22:03:13 +0000 UTC" firstStartedPulling="2025-09-29 22:03:14.753365208 +0000 UTC m=+2272.730477889" lastFinishedPulling="2025-09-29 22:03:15.43525232 +0000 UTC m=+2273.412364991" observedRunningTime="2025-09-29 22:03:15.774479058 +0000 UTC m=+2273.751591739" watchObservedRunningTime="2025-09-29 22:03:15.784704747 +0000 UTC m=+2273.761817418" Sep 29 22:03:22 crc kubenswrapper[4911]: I0929 22:03:22.733760 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:03:22 crc kubenswrapper[4911]: E0929 22:03:22.738325 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:03:33 crc kubenswrapper[4911]: I0929 22:03:33.701496 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:03:33 crc kubenswrapper[4911]: E0929 22:03:33.703588 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:03:48 crc kubenswrapper[4911]: I0929 22:03:48.701154 4911 scope.go:117] 
"RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:03:48 crc kubenswrapper[4911]: E0929 22:03:48.703372 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:03:59 crc kubenswrapper[4911]: I0929 22:03:59.701132 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:03:59 crc kubenswrapper[4911]: E0929 22:03:59.701960 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:04:12 crc kubenswrapper[4911]: I0929 22:04:12.706625 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:04:12 crc kubenswrapper[4911]: E0929 22:04:12.707455 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:04:26 crc kubenswrapper[4911]: I0929 22:04:26.701983 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:04:26 crc kubenswrapper[4911]: E0929 22:04:26.703028 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:04:41 crc kubenswrapper[4911]: I0929 22:04:41.701611 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:04:41 crc kubenswrapper[4911]: E0929 22:04:41.702415 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:04:55 crc kubenswrapper[4911]: I0929 22:04:55.701584 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:04:55 crc kubenswrapper[4911]: E0929 22:04:55.703005 4911 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:05:09 crc kubenswrapper[4911]: I0929 22:05:09.700886 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:05:09 crc kubenswrapper[4911]: E0929 22:05:09.701863 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:05:24 crc kubenswrapper[4911]: I0929 22:05:24.701220 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:05:24 crc kubenswrapper[4911]: E0929 22:05:24.702542 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:05:39 crc kubenswrapper[4911]: I0929 22:05:39.701700 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:05:39 crc kubenswrapper[4911]: E0929 22:05:39.702466 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:05:52 crc kubenswrapper[4911]: I0929 22:05:52.714549 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:05:52 crc kubenswrapper[4911]: E0929 22:05:52.715896 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:06:07 crc kubenswrapper[4911]: I0929 22:06:07.701206 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:06:07 crc kubenswrapper[4911]: E0929 22:06:07.702340 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:06:18 crc kubenswrapper[4911]: I0929 22:06:18.701113 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:06:18 crc kubenswrapper[4911]: E0929 22:06:18.704210 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:06:32 crc kubenswrapper[4911]: I0929 22:06:32.712465 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:06:33 crc kubenswrapper[4911]: I0929 22:06:33.800716 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"7be79346af5955e6a8de71a5a3427d7157502674786602aada446da1543ebd80"} Sep 29 22:06:54 crc kubenswrapper[4911]: I0929 22:06:54.030592 4911 generic.go:334] "Generic (PLEG): container finished" podID="ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" containerID="bca76067bd026005e3eaf907f0a32ea3a18a6b186ebfd141dedc4f656ce3e890" exitCode=0 Sep 29 22:06:54 crc kubenswrapper[4911]: I0929 22:06:54.030710 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" event={"ID":"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa","Type":"ContainerDied","Data":"bca76067bd026005e3eaf907f0a32ea3a18a6b186ebfd141dedc4f656ce3e890"} Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.427300 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.607361 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-cell1-compute-config-1\") pod \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.607446 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-cell1-compute-config-0\") pod \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.607535 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-inventory\") pod \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.607558 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-ssh-key\") pod \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.607599 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-migration-ssh-key-0\") pod \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.607629 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-extra-config-0\") pod \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.607647 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4cj55\" (UniqueName: \"kubernetes.io/projected/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-kube-api-access-4cj55\") pod \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.607752 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-migration-ssh-key-1\") pod \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.607872 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-combined-ca-bundle\") pod \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\" (UID: \"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa\") " Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.616214 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" (UID: "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.616327 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-kube-api-access-4cj55" (OuterVolumeSpecName: "kube-api-access-4cj55") pod "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" (UID: "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa"). InnerVolumeSpecName "kube-api-access-4cj55". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.636922 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" (UID: "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.638346 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" (UID: "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.639316 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" (UID: "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.645226 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-inventory" (OuterVolumeSpecName: "inventory") pod "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" (UID: "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.648513 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" (UID: "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.652429 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" (UID: "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa"). InnerVolumeSpecName "nova-extra-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.662946 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" (UID: "ee01ae86-832b-41fb-8a4a-53a73ffdb3aa"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.710339 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.710525 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.710582 4911 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.710599 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4cj55\" (UniqueName: \"kubernetes.io/projected/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-kube-api-access-4cj55\") on node \"crc\" DevicePath \"\"" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.710615 4911 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.710627 4911 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.710638 4911 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.710654 4911 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Sep 29 22:06:55 crc kubenswrapper[4911]: I0929 22:06:55.710666 4911 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/ee01ae86-832b-41fb-8a4a-53a73ffdb3aa-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.053971 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" event={"ID":"ee01ae86-832b-41fb-8a4a-53a73ffdb3aa","Type":"ContainerDied","Data":"0e35fc868634fb7675e55e13780a54c0464c49236659481d8f34e3617a65c0e1"} Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.054041 4911 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="0e35fc868634fb7675e55e13780a54c0464c49236659481d8f34e3617a65c0e1" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.054072 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dzlt5" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.175398 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9"] Sep 29 22:06:56 crc kubenswrapper[4911]: E0929 22:06:56.176031 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.176055 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.176308 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee01ae86-832b-41fb-8a4a-53a73ffdb3aa" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.177048 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.179346 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.180054 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-h4cgt" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.180375 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.181012 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.182887 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.193129 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9"] Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.324044 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.324132 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.324351 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.324426 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.324572 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.324628 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7twr\" (UniqueName: \"kubernetes.io/projected/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-kube-api-access-r7twr\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.324685 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.427678 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.427852 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.427917 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc 
kubenswrapper[4911]: I0929 22:06:56.427965 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.428037 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.428072 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7twr\" (UniqueName: \"kubernetes.io/projected/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-kube-api-access-r7twr\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.428129 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.432764 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.433787 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.435056 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.435199 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.435486 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.436383 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.463533 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7twr\" (UniqueName: \"kubernetes.io/projected/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-kube-api-access-r7twr\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:56 crc kubenswrapper[4911]: I0929 22:06:56.492427 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" Sep 29 22:06:57 crc kubenswrapper[4911]: I0929 22:06:57.023250 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9"] Sep 29 22:06:57 crc kubenswrapper[4911]: I0929 22:06:57.029913 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 22:06:57 crc kubenswrapper[4911]: I0929 22:06:57.062658 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" event={"ID":"16ca72ec-4fbd-4367-b5c8-ea180f6fc189","Type":"ContainerStarted","Data":"faecd9d3687908ada763c97d55446241ff6e12670ffa5a68a7b3967ac98681fc"} Sep 29 22:06:58 crc kubenswrapper[4911]: I0929 22:06:58.071876 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" event={"ID":"16ca72ec-4fbd-4367-b5c8-ea180f6fc189","Type":"ContainerStarted","Data":"440c66e2aab02203ef82f417bf95986e99291c5c6fb22b0fc1c323e4979ba345"} Sep 29 22:06:58 crc kubenswrapper[4911]: I0929 22:06:58.094271 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" podStartSLOduration=1.492629006 podStartE2EDuration="2.094252571s" podCreationTimestamp="2025-09-29 22:06:56 +0000 UTC" firstStartedPulling="2025-09-29 22:06:57.029658776 +0000 UTC m=+2495.006771447" lastFinishedPulling="2025-09-29 22:06:57.631282301 +0000 UTC m=+2495.608395012" observedRunningTime="2025-09-29 22:06:58.092121174 +0000 UTC m=+2496.069233915" watchObservedRunningTime="2025-09-29 22:06:58.094252571 +0000 UTC m=+2496.071365252" Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.526394 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5tlb9"] Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.533661 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5tlb9" Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.555115 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5tlb9"] Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.651683 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7398693-00ec-48a6-aee1-812d8ceac031-catalog-content\") pod \"redhat-operators-5tlb9\" (UID: \"d7398693-00ec-48a6-aee1-812d8ceac031\") " pod="openshift-marketplace/redhat-operators-5tlb9" Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.652084 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7398693-00ec-48a6-aee1-812d8ceac031-utilities\") pod \"redhat-operators-5tlb9\" (UID: \"d7398693-00ec-48a6-aee1-812d8ceac031\") " pod="openshift-marketplace/redhat-operators-5tlb9" Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.652224 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgwx5\" (UniqueName: \"kubernetes.io/projected/d7398693-00ec-48a6-aee1-812d8ceac031-kube-api-access-lgwx5\") pod \"redhat-operators-5tlb9\" (UID: \"d7398693-00ec-48a6-aee1-812d8ceac031\") " pod="openshift-marketplace/redhat-operators-5tlb9" Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.753763 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7398693-00ec-48a6-aee1-812d8ceac031-utilities\") pod \"redhat-operators-5tlb9\" (UID: \"d7398693-00ec-48a6-aee1-812d8ceac031\") " pod="openshift-marketplace/redhat-operators-5tlb9" Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.754039 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgwx5\" (UniqueName: \"kubernetes.io/projected/d7398693-00ec-48a6-aee1-812d8ceac031-kube-api-access-lgwx5\") pod \"redhat-operators-5tlb9\" (UID: \"d7398693-00ec-48a6-aee1-812d8ceac031\") " pod="openshift-marketplace/redhat-operators-5tlb9" Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.754106 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7398693-00ec-48a6-aee1-812d8ceac031-catalog-content\") pod \"redhat-operators-5tlb9\" (UID: \"d7398693-00ec-48a6-aee1-812d8ceac031\") " pod="openshift-marketplace/redhat-operators-5tlb9" Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.754289 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7398693-00ec-48a6-aee1-812d8ceac031-utilities\") pod \"redhat-operators-5tlb9\" (UID: \"d7398693-00ec-48a6-aee1-812d8ceac031\") " pod="openshift-marketplace/redhat-operators-5tlb9" Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.754465 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7398693-00ec-48a6-aee1-812d8ceac031-catalog-content\") pod \"redhat-operators-5tlb9\" (UID: \"d7398693-00ec-48a6-aee1-812d8ceac031\") " pod="openshift-marketplace/redhat-operators-5tlb9" Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.786112 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-lgwx5\" (UniqueName: \"kubernetes.io/projected/d7398693-00ec-48a6-aee1-812d8ceac031-kube-api-access-lgwx5\") pod \"redhat-operators-5tlb9\" (UID: \"d7398693-00ec-48a6-aee1-812d8ceac031\") " pod="openshift-marketplace/redhat-operators-5tlb9" Sep 29 22:07:54 crc kubenswrapper[4911]: I0929 22:07:54.864772 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5tlb9" Sep 29 22:07:55 crc kubenswrapper[4911]: I0929 22:07:55.346843 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5tlb9"] Sep 29 22:07:55 crc kubenswrapper[4911]: I0929 22:07:55.690462 4911 generic.go:334] "Generic (PLEG): container finished" podID="d7398693-00ec-48a6-aee1-812d8ceac031" containerID="89d3181f68c99d54e791fd0af2bdb1177ab32d7f71d4c7ca9190ea227d2905d5" exitCode=0 Sep 29 22:07:55 crc kubenswrapper[4911]: I0929 22:07:55.690521 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tlb9" event={"ID":"d7398693-00ec-48a6-aee1-812d8ceac031","Type":"ContainerDied","Data":"89d3181f68c99d54e791fd0af2bdb1177ab32d7f71d4c7ca9190ea227d2905d5"} Sep 29 22:07:55 crc kubenswrapper[4911]: I0929 22:07:55.690828 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tlb9" event={"ID":"d7398693-00ec-48a6-aee1-812d8ceac031","Type":"ContainerStarted","Data":"b189f34667d38a3769f9f02ef596cba8db52e12baa87549829a9ed795081ca00"} Sep 29 22:07:56 crc kubenswrapper[4911]: I0929 22:07:56.718468 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tlb9" event={"ID":"d7398693-00ec-48a6-aee1-812d8ceac031","Type":"ContainerStarted","Data":"45c1046d3d7ce33f2fa568560c867fbe7f0e242eb4d8f1fc1e660f3c0e6b9f03"} Sep 29 22:07:57 crc kubenswrapper[4911]: I0929 22:07:57.719690 4911 generic.go:334] "Generic (PLEG): container finished" podID="d7398693-00ec-48a6-aee1-812d8ceac031" containerID="45c1046d3d7ce33f2fa568560c867fbe7f0e242eb4d8f1fc1e660f3c0e6b9f03" exitCode=0 Sep 29 22:07:57 crc kubenswrapper[4911]: I0929 22:07:57.719735 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tlb9" event={"ID":"d7398693-00ec-48a6-aee1-812d8ceac031","Type":"ContainerDied","Data":"45c1046d3d7ce33f2fa568560c867fbe7f0e242eb4d8f1fc1e660f3c0e6b9f03"} Sep 29 22:07:58 crc kubenswrapper[4911]: I0929 22:07:58.729759 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tlb9" event={"ID":"d7398693-00ec-48a6-aee1-812d8ceac031","Type":"ContainerStarted","Data":"30177e0aebf5338b4696fbd95410560788e5234e7794f52ddb506a39b604b1b3"} Sep 29 22:07:58 crc kubenswrapper[4911]: I0929 22:07:58.770858 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5tlb9" podStartSLOduration=2.272293881 podStartE2EDuration="4.770828561s" podCreationTimestamp="2025-09-29 22:07:54 +0000 UTC" firstStartedPulling="2025-09-29 22:07:55.692020343 +0000 UTC m=+2553.669133014" lastFinishedPulling="2025-09-29 22:07:58.190555023 +0000 UTC m=+2556.167667694" observedRunningTime="2025-09-29 22:07:58.751836307 +0000 UTC m=+2556.728948998" watchObservedRunningTime="2025-09-29 22:07:58.770828561 +0000 UTC m=+2556.747941262" Sep 29 22:08:04 crc kubenswrapper[4911]: I0929 22:08:04.866023 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5tlb9" 
Sep 29 22:08:04 crc kubenswrapper[4911]: I0929 22:08:04.866661 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5tlb9"
Sep 29 22:08:04 crc kubenswrapper[4911]: I0929 22:08:04.918874 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5tlb9"
Sep 29 22:08:05 crc kubenswrapper[4911]: I0929 22:08:05.894731 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5tlb9"
Sep 29 22:08:05 crc kubenswrapper[4911]: I0929 22:08:05.958359 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5tlb9"]
Sep 29 22:08:07 crc kubenswrapper[4911]: I0929 22:08:07.846039 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5tlb9" podUID="d7398693-00ec-48a6-aee1-812d8ceac031" containerName="registry-server" containerID="cri-o://30177e0aebf5338b4696fbd95410560788e5234e7794f52ddb506a39b604b1b3" gracePeriod=2
Sep 29 22:08:08 crc kubenswrapper[4911]: I0929 22:08:08.857138 4911 generic.go:334] "Generic (PLEG): container finished" podID="d7398693-00ec-48a6-aee1-812d8ceac031" containerID="30177e0aebf5338b4696fbd95410560788e5234e7794f52ddb506a39b604b1b3" exitCode=0
Sep 29 22:08:08 crc kubenswrapper[4911]: I0929 22:08:08.857212 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tlb9" event={"ID":"d7398693-00ec-48a6-aee1-812d8ceac031","Type":"ContainerDied","Data":"30177e0aebf5338b4696fbd95410560788e5234e7794f52ddb506a39b604b1b3"}
Sep 29 22:08:08 crc kubenswrapper[4911]: I0929 22:08:08.986217 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5tlb9"
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.129358 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgwx5\" (UniqueName: \"kubernetes.io/projected/d7398693-00ec-48a6-aee1-812d8ceac031-kube-api-access-lgwx5\") pod \"d7398693-00ec-48a6-aee1-812d8ceac031\" (UID: \"d7398693-00ec-48a6-aee1-812d8ceac031\") "
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.129648 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7398693-00ec-48a6-aee1-812d8ceac031-utilities\") pod \"d7398693-00ec-48a6-aee1-812d8ceac031\" (UID: \"d7398693-00ec-48a6-aee1-812d8ceac031\") "
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.129820 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7398693-00ec-48a6-aee1-812d8ceac031-catalog-content\") pod \"d7398693-00ec-48a6-aee1-812d8ceac031\" (UID: \"d7398693-00ec-48a6-aee1-812d8ceac031\") "
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.130599 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7398693-00ec-48a6-aee1-812d8ceac031-utilities" (OuterVolumeSpecName: "utilities") pod "d7398693-00ec-48a6-aee1-812d8ceac031" (UID: "d7398693-00ec-48a6-aee1-812d8ceac031"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.137064 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7398693-00ec-48a6-aee1-812d8ceac031-kube-api-access-lgwx5" (OuterVolumeSpecName: "kube-api-access-lgwx5") pod "d7398693-00ec-48a6-aee1-812d8ceac031" (UID: "d7398693-00ec-48a6-aee1-812d8ceac031"). InnerVolumeSpecName "kube-api-access-lgwx5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.230429 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7398693-00ec-48a6-aee1-812d8ceac031-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d7398693-00ec-48a6-aee1-812d8ceac031" (UID: "d7398693-00ec-48a6-aee1-812d8ceac031"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.231612 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgwx5\" (UniqueName: \"kubernetes.io/projected/d7398693-00ec-48a6-aee1-812d8ceac031-kube-api-access-lgwx5\") on node \"crc\" DevicePath \"\""
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.231642 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7398693-00ec-48a6-aee1-812d8ceac031-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.231652 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7398693-00ec-48a6-aee1-812d8ceac031-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.868896 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tlb9" event={"ID":"d7398693-00ec-48a6-aee1-812d8ceac031","Type":"ContainerDied","Data":"b189f34667d38a3769f9f02ef596cba8db52e12baa87549829a9ed795081ca00"}
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.868968 4911 scope.go:117] "RemoveContainer" containerID="30177e0aebf5338b4696fbd95410560788e5234e7794f52ddb506a39b604b1b3"
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.869191 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5tlb9"
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.891421 4911 scope.go:117] "RemoveContainer" containerID="45c1046d3d7ce33f2fa568560c867fbe7f0e242eb4d8f1fc1e660f3c0e6b9f03"
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.913083 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5tlb9"]
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.921020 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5tlb9"]
Sep 29 22:08:09 crc kubenswrapper[4911]: I0929 22:08:09.933724 4911 scope.go:117] "RemoveContainer" containerID="89d3181f68c99d54e791fd0af2bdb1177ab32d7f71d4c7ca9190ea227d2905d5"
Sep 29 22:08:10 crc kubenswrapper[4911]: I0929 22:08:10.715273 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7398693-00ec-48a6-aee1-812d8ceac031" path="/var/lib/kubelet/pods/d7398693-00ec-48a6-aee1-812d8ceac031/volumes"
Sep 29 22:08:55 crc kubenswrapper[4911]: I0929 22:08:55.211536 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 22:08:55 crc kubenswrapper[4911]: I0929 22:08:55.212184 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.243870 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hq5gz"]
Sep 29 22:08:59 crc kubenswrapper[4911]: E0929 22:08:59.244855 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7398693-00ec-48a6-aee1-812d8ceac031" containerName="registry-server"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.244874 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7398693-00ec-48a6-aee1-812d8ceac031" containerName="registry-server"
Sep 29 22:08:59 crc kubenswrapper[4911]: E0929 22:08:59.244893 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7398693-00ec-48a6-aee1-812d8ceac031" containerName="extract-utilities"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.244900 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7398693-00ec-48a6-aee1-812d8ceac031" containerName="extract-utilities"
Sep 29 22:08:59 crc kubenswrapper[4911]: E0929 22:08:59.244912 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7398693-00ec-48a6-aee1-812d8ceac031" containerName="extract-content"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.244919 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7398693-00ec-48a6-aee1-812d8ceac031" containerName="extract-content"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.245173 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7398693-00ec-48a6-aee1-812d8ceac031" containerName="registry-server"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.249226 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hq5gz"
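[Editor's note] The "RemoveStaleState" / "Deleted CPUSet assignment" pairs above fire when a new pod is admitted: the CPU and memory managers sweep their checkpointed per-container state and drop entries belonging to pods that no longer exist. An illustrative sketch of that sweep (the state map stands in for the managers' checkpoint; names are hypothetical):

```go
package main

import "fmt"

type key struct{ podUID, container string }

// removeStaleState drops per-container assignments whose pod is no longer active.
// Deleting from a map while ranging over it is safe in Go.
func removeStaleState(state map[key]string, activePods map[string]bool) {
	for k := range state {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%s containerName=%s\n",
				k.podUID, k.container)
			delete(state, k)
		}
	}
}

func main() {
	state := map[key]string{
		{"d7398693-00ec-48a6-aee1-812d8ceac031", "registry-server"}: "cpuset 0-3",
	}
	removeStaleState(state, map[string]bool{}) // the pod was deleted, so its entry is swept
}
```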
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.259202 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hq5gz"]
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.375876 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skpbl\" (UniqueName: \"kubernetes.io/projected/653d35c6-3971-4326-9510-6def96d40bd8-kube-api-access-skpbl\") pod \"redhat-marketplace-hq5gz\" (UID: \"653d35c6-3971-4326-9510-6def96d40bd8\") " pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.375944 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/653d35c6-3971-4326-9510-6def96d40bd8-catalog-content\") pod \"redhat-marketplace-hq5gz\" (UID: \"653d35c6-3971-4326-9510-6def96d40bd8\") " pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.376188 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/653d35c6-3971-4326-9510-6def96d40bd8-utilities\") pod \"redhat-marketplace-hq5gz\" (UID: \"653d35c6-3971-4326-9510-6def96d40bd8\") " pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.478324 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skpbl\" (UniqueName: \"kubernetes.io/projected/653d35c6-3971-4326-9510-6def96d40bd8-kube-api-access-skpbl\") pod \"redhat-marketplace-hq5gz\" (UID: \"653d35c6-3971-4326-9510-6def96d40bd8\") " pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.478383 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/653d35c6-3971-4326-9510-6def96d40bd8-catalog-content\") pod \"redhat-marketplace-hq5gz\" (UID: \"653d35c6-3971-4326-9510-6def96d40bd8\") " pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.478443 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/653d35c6-3971-4326-9510-6def96d40bd8-utilities\") pod \"redhat-marketplace-hq5gz\" (UID: \"653d35c6-3971-4326-9510-6def96d40bd8\") " pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.479415 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/653d35c6-3971-4326-9510-6def96d40bd8-utilities\") pod \"redhat-marketplace-hq5gz\" (UID: \"653d35c6-3971-4326-9510-6def96d40bd8\") " pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.479426 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/653d35c6-3971-4326-9510-6def96d40bd8-catalog-content\") pod \"redhat-marketplace-hq5gz\" (UID: \"653d35c6-3971-4326-9510-6def96d40bd8\") " pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.502048 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skpbl\" (UniqueName: \"kubernetes.io/projected/653d35c6-3971-4326-9510-6def96d40bd8-kube-api-access-skpbl\") pod \"redhat-marketplace-hq5gz\" (UID: \"653d35c6-3971-4326-9510-6def96d40bd8\") " pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:08:59 crc kubenswrapper[4911]: I0929 22:08:59.573749 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:09:00 crc kubenswrapper[4911]: I0929 22:09:00.040508 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hq5gz"]
Sep 29 22:09:00 crc kubenswrapper[4911]: W0929 22:09:00.042011 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod653d35c6_3971_4326_9510_6def96d40bd8.slice/crio-6d2408c7c231018e0429da53ff6b450afc808f6d5a0f30b25c139c03e59024cc WatchSource:0}: Error finding container 6d2408c7c231018e0429da53ff6b450afc808f6d5a0f30b25c139c03e59024cc: Status 404 returned error can't find the container with id 6d2408c7c231018e0429da53ff6b450afc808f6d5a0f30b25c139c03e59024cc
Sep 29 22:09:00 crc kubenswrapper[4911]: I0929 22:09:00.351423 4911 generic.go:334] "Generic (PLEG): container finished" podID="653d35c6-3971-4326-9510-6def96d40bd8" containerID="99ea44c434ec6320f191d9e463f725cda4628ba3052c0a052f784a489dd6635f" exitCode=0
Sep 29 22:09:00 crc kubenswrapper[4911]: I0929 22:09:00.351505 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hq5gz" event={"ID":"653d35c6-3971-4326-9510-6def96d40bd8","Type":"ContainerDied","Data":"99ea44c434ec6320f191d9e463f725cda4628ba3052c0a052f784a489dd6635f"}
Sep 29 22:09:00 crc kubenswrapper[4911]: I0929 22:09:00.351779 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hq5gz" event={"ID":"653d35c6-3971-4326-9510-6def96d40bd8","Type":"ContainerStarted","Data":"6d2408c7c231018e0429da53ff6b450afc808f6d5a0f30b25c139c03e59024cc"}
Sep 29 22:09:02 crc kubenswrapper[4911]: I0929 22:09:02.372344 4911 generic.go:334] "Generic (PLEG): container finished" podID="653d35c6-3971-4326-9510-6def96d40bd8" containerID="04f328fd2542b675551cfcf2b197106e9694560244b41b1e7cab1e6a602d1f35" exitCode=0
Sep 29 22:09:02 crc kubenswrapper[4911]: I0929 22:09:02.372392 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hq5gz" event={"ID":"653d35c6-3971-4326-9510-6def96d40bd8","Type":"ContainerDied","Data":"04f328fd2542b675551cfcf2b197106e9694560244b41b1e7cab1e6a602d1f35"}
Sep 29 22:09:03 crc kubenswrapper[4911]: I0929 22:09:03.387276 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hq5gz" event={"ID":"653d35c6-3971-4326-9510-6def96d40bd8","Type":"ContainerStarted","Data":"1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232"}
Sep 29 22:09:03 crc kubenswrapper[4911]: I0929 22:09:03.419216 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hq5gz" podStartSLOduration=1.9160556560000002 podStartE2EDuration="4.419190391s" podCreationTimestamp="2025-09-29 22:08:59 +0000 UTC" firstStartedPulling="2025-09-29 22:09:00.353483372 +0000 UTC m=+2618.330596083" lastFinishedPulling="2025-09-29 22:09:02.856618147 +0000 UTC m=+2620.833730818" observedRunningTime="2025-09-29 22:09:03.413295517 +0000 UTC m=+2621.390408188" watchObservedRunningTime="2025-09-29 22:09:03.419190391 +0000 UTC m=+2621.396303092"
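[Editor's note] The "Generic (PLEG): container finished" lines above come from the pod lifecycle event generator, which periodically relists containers from the runtime and diffs the result against its cached snapshot to synthesize ContainerStarted / ContainerDied events for the sync loop. A simplified sketch of that diff step (hypothetical types, not kubelet's internals):

```go
package main

import "fmt"

type state string

const (
	running state = "running"
	exited  state = "exited"
)

// relist compares the previous and current container snapshots and emits a
// lifecycle event for each observed transition, keyed by container ID.
func relist(before, after map[string]state) {
	for id, cur := range after {
		prev, seen := before[id]
		switch {
		case !seen && cur == running:
			fmt.Printf("event ContainerStarted %s\n", id)
		case seen && prev == running && cur == exited:
			fmt.Printf("event ContainerDied %s\n", id)
		}
	}
}

func main() {
	// The init container 99ea44c4... ran to completion between two relists.
	relist(map[string]state{"99ea44c4": running}, map[string]state{"99ea44c4": exited})
}
```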
Sep 29 22:09:09 crc kubenswrapper[4911]: I0929 22:09:09.574745 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:09:09 crc kubenswrapper[4911]: I0929 22:09:09.575487 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:09:09 crc kubenswrapper[4911]: I0929 22:09:09.652573 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:09:10 crc kubenswrapper[4911]: I0929 22:09:10.522041 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:09:10 crc kubenswrapper[4911]: I0929 22:09:10.593380 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hq5gz"]
Sep 29 22:09:12 crc kubenswrapper[4911]: I0929 22:09:12.476103 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hq5gz" podUID="653d35c6-3971-4326-9510-6def96d40bd8" containerName="registry-server" containerID="cri-o://1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232" gracePeriod=2
Sep 29 22:09:12 crc kubenswrapper[4911]: I0929 22:09:12.939061 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hq5gz"
Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.062671 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/653d35c6-3971-4326-9510-6def96d40bd8-utilities\") pod \"653d35c6-3971-4326-9510-6def96d40bd8\" (UID: \"653d35c6-3971-4326-9510-6def96d40bd8\") "
Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.062853 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skpbl\" (UniqueName: \"kubernetes.io/projected/653d35c6-3971-4326-9510-6def96d40bd8-kube-api-access-skpbl\") pod \"653d35c6-3971-4326-9510-6def96d40bd8\" (UID: \"653d35c6-3971-4326-9510-6def96d40bd8\") "
Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.063090 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/653d35c6-3971-4326-9510-6def96d40bd8-catalog-content\") pod \"653d35c6-3971-4326-9510-6def96d40bd8\" (UID: \"653d35c6-3971-4326-9510-6def96d40bd8\") "
Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.064166 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/653d35c6-3971-4326-9510-6def96d40bd8-utilities" (OuterVolumeSpecName: "utilities") pod "653d35c6-3971-4326-9510-6def96d40bd8" (UID: "653d35c6-3971-4326-9510-6def96d40bd8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.076222 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/653d35c6-3971-4326-9510-6def96d40bd8-kube-api-access-skpbl" (OuterVolumeSpecName: "kube-api-access-skpbl") pod "653d35c6-3971-4326-9510-6def96d40bd8" (UID: "653d35c6-3971-4326-9510-6def96d40bd8"). InnerVolumeSpecName "kube-api-access-skpbl". PluginName "kubernetes.io/projected", VolumeGidValue ""
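[Editor's note] "Killing container with a grace period" with gracePeriod=2 means the runtime sends SIGTERM and escalates to SIGKILL if the container has not exited within two seconds. A sketch of that sequence; the stop callback stands in for the CRI StopContainer call, whose timeout semantics are exactly this TERM-then-KILL escalation:

```go
package main

import (
	"fmt"
	"time"
)

// killContainer logs the kill and delegates to the runtime with the grace
// period as the stop timeout. The callback is a stand-in, not a real CRI client.
func killContainer(stop func(id string, timeout time.Duration) error, id string, grace time.Duration) {
	fmt.Printf("Killing container %s gracePeriod=%s\n", id, grace)
	if err := stop(id, grace); err != nil {
		fmt.Printf("stop failed: %v\n", err)
	}
}

func main() {
	killContainer(func(id string, timeout time.Duration) error {
		// A CRI runtime sends SIGTERM here, waits up to `timeout`,
		// then sends SIGKILL if the process is still running.
		fmt.Printf("TERM %s; KILL after %s if still running\n", id, timeout)
		return nil
	}, "cri-o://1924f14a", 2*time.Second)
}
```

The one-second gap between the kill at 22:09:12.476 and the ContainerDied event at 22:09:13.490 is consistent with the container exiting on SIGTERM, inside the grace window.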
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.076308 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/653d35c6-3971-4326-9510-6def96d40bd8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "653d35c6-3971-4326-9510-6def96d40bd8" (UID: "653d35c6-3971-4326-9510-6def96d40bd8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.165641 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/653d35c6-3971-4326-9510-6def96d40bd8-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.165682 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skpbl\" (UniqueName: \"kubernetes.io/projected/653d35c6-3971-4326-9510-6def96d40bd8-kube-api-access-skpbl\") on node \"crc\" DevicePath \"\"" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.165696 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/653d35c6-3971-4326-9510-6def96d40bd8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.490286 4911 generic.go:334] "Generic (PLEG): container finished" podID="653d35c6-3971-4326-9510-6def96d40bd8" containerID="1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232" exitCode=0 Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.490331 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hq5gz" event={"ID":"653d35c6-3971-4326-9510-6def96d40bd8","Type":"ContainerDied","Data":"1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232"} Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.490381 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hq5gz" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.490400 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hq5gz" event={"ID":"653d35c6-3971-4326-9510-6def96d40bd8","Type":"ContainerDied","Data":"6d2408c7c231018e0429da53ff6b450afc808f6d5a0f30b25c139c03e59024cc"} Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.490447 4911 scope.go:117] "RemoveContainer" containerID="1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.524945 4911 scope.go:117] "RemoveContainer" containerID="04f328fd2542b675551cfcf2b197106e9694560244b41b1e7cab1e6a602d1f35" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.542954 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hq5gz"] Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.553915 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hq5gz"] Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.570704 4911 scope.go:117] "RemoveContainer" containerID="99ea44c434ec6320f191d9e463f725cda4628ba3052c0a052f784a489dd6635f" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.613670 4911 scope.go:117] "RemoveContainer" containerID="1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232" Sep 29 22:09:13 crc kubenswrapper[4911]: E0929 22:09:13.614526 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232\": container with ID starting with 1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232 not found: ID does not exist" containerID="1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.614595 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232"} err="failed to get container status \"1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232\": rpc error: code = NotFound desc = could not find container \"1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232\": container with ID starting with 1924f14a42c470eede26bc341a3608dd6132ec701d9788f0dcc1614d72c14232 not found: ID does not exist" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.614640 4911 scope.go:117] "RemoveContainer" containerID="04f328fd2542b675551cfcf2b197106e9694560244b41b1e7cab1e6a602d1f35" Sep 29 22:09:13 crc kubenswrapper[4911]: E0929 22:09:13.615169 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04f328fd2542b675551cfcf2b197106e9694560244b41b1e7cab1e6a602d1f35\": container with ID starting with 04f328fd2542b675551cfcf2b197106e9694560244b41b1e7cab1e6a602d1f35 not found: ID does not exist" containerID="04f328fd2542b675551cfcf2b197106e9694560244b41b1e7cab1e6a602d1f35" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.615206 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04f328fd2542b675551cfcf2b197106e9694560244b41b1e7cab1e6a602d1f35"} err="failed to get container status \"04f328fd2542b675551cfcf2b197106e9694560244b41b1e7cab1e6a602d1f35\": rpc error: code = NotFound desc = could not find 
container \"04f328fd2542b675551cfcf2b197106e9694560244b41b1e7cab1e6a602d1f35\": container with ID starting with 04f328fd2542b675551cfcf2b197106e9694560244b41b1e7cab1e6a602d1f35 not found: ID does not exist" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.615227 4911 scope.go:117] "RemoveContainer" containerID="99ea44c434ec6320f191d9e463f725cda4628ba3052c0a052f784a489dd6635f" Sep 29 22:09:13 crc kubenswrapper[4911]: E0929 22:09:13.615710 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99ea44c434ec6320f191d9e463f725cda4628ba3052c0a052f784a489dd6635f\": container with ID starting with 99ea44c434ec6320f191d9e463f725cda4628ba3052c0a052f784a489dd6635f not found: ID does not exist" containerID="99ea44c434ec6320f191d9e463f725cda4628ba3052c0a052f784a489dd6635f" Sep 29 22:09:13 crc kubenswrapper[4911]: I0929 22:09:13.615826 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99ea44c434ec6320f191d9e463f725cda4628ba3052c0a052f784a489dd6635f"} err="failed to get container status \"99ea44c434ec6320f191d9e463f725cda4628ba3052c0a052f784a489dd6635f\": rpc error: code = NotFound desc = could not find container \"99ea44c434ec6320f191d9e463f725cda4628ba3052c0a052f784a489dd6635f\": container with ID starting with 99ea44c434ec6320f191d9e463f725cda4628ba3052c0a052f784a489dd6635f not found: ID does not exist" Sep 29 22:09:14 crc kubenswrapper[4911]: I0929 22:09:14.718180 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="653d35c6-3971-4326-9510-6def96d40bd8" path="/var/lib/kubelet/pods/653d35c6-3971-4326-9510-6def96d40bd8/volumes" Sep 29 22:09:23 crc kubenswrapper[4911]: I0929 22:09:23.609371 4911 generic.go:334] "Generic (PLEG): container finished" podID="16ca72ec-4fbd-4367-b5c8-ea180f6fc189" containerID="440c66e2aab02203ef82f417bf95986e99291c5c6fb22b0fc1c323e4979ba345" exitCode=0 Sep 29 22:09:23 crc kubenswrapper[4911]: I0929 22:09:23.609477 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" event={"ID":"16ca72ec-4fbd-4367-b5c8-ea180f6fc189","Type":"ContainerDied","Data":"440c66e2aab02203ef82f417bf95986e99291c5c6fb22b0fc1c323e4979ba345"} Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.043698 4911 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.107303 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-0\") pod \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") "
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.107360 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-telemetry-combined-ca-bundle\") pod \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") "
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.107402 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ssh-key\") pod \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") "
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.107638 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-1\") pod \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") "
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.107735 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-2\") pod \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") "
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.107767 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-inventory\") pod \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") "
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.108235 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7twr\" (UniqueName: \"kubernetes.io/projected/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-kube-api-access-r7twr\") pod \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\" (UID: \"16ca72ec-4fbd-4367-b5c8-ea180f6fc189\") "
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.112480 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "16ca72ec-4fbd-4367-b5c8-ea180f6fc189" (UID: "16ca72ec-4fbd-4367-b5c8-ea180f6fc189"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.115704 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-kube-api-access-r7twr" (OuterVolumeSpecName: "kube-api-access-r7twr") pod "16ca72ec-4fbd-4367-b5c8-ea180f6fc189" (UID: "16ca72ec-4fbd-4367-b5c8-ea180f6fc189"). InnerVolumeSpecName "kube-api-access-r7twr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.137652 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-inventory" (OuterVolumeSpecName: "inventory") pod "16ca72ec-4fbd-4367-b5c8-ea180f6fc189" (UID: "16ca72ec-4fbd-4367-b5c8-ea180f6fc189"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.138906 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "16ca72ec-4fbd-4367-b5c8-ea180f6fc189" (UID: "16ca72ec-4fbd-4367-b5c8-ea180f6fc189"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.138986 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "16ca72ec-4fbd-4367-b5c8-ea180f6fc189" (UID: "16ca72ec-4fbd-4367-b5c8-ea180f6fc189"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.139622 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "16ca72ec-4fbd-4367-b5c8-ea180f6fc189" (UID: "16ca72ec-4fbd-4367-b5c8-ea180f6fc189"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.146638 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "16ca72ec-4fbd-4367-b5c8-ea180f6fc189" (UID: "16ca72ec-4fbd-4367-b5c8-ea180f6fc189"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.210979 4911 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.211016 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.211028 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7twr\" (UniqueName: \"kubernetes.io/projected/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-kube-api-access-r7twr\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.211038 4911 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.211047 4911 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.211057 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.211065 4911 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/16ca72ec-4fbd-4367-b5c8-ea180f6fc189-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.211348 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.211495 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.637494 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9" event={"ID":"16ca72ec-4fbd-4367-b5c8-ea180f6fc189","Type":"ContainerDied","Data":"faecd9d3687908ada763c97d55446241ff6e12670ffa5a68a7b3967ac98681fc"}
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.637542 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="faecd9d3687908ada763c97d55446241ff6e12670ffa5a68a7b3967ac98681fc"
Sep 29 22:09:25 crc kubenswrapper[4911]: I0929 22:09:25.637596 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9"
Sep 29 22:09:32 crc kubenswrapper[4911]: I0929 22:09:32.804954 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 22:09:32 crc kubenswrapper[4911]: I0929 22:09:32.805578 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="9983d5ff-ae31-4562-a659-9acb5742e5e7" containerName="kube-state-metrics" containerID="cri-o://5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3" gracePeriod=30
Sep 29 22:09:32 crc kubenswrapper[4911]: I0929 22:09:32.985860 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 22:09:32 crc kubenswrapper[4911]: I0929 22:09:32.986215 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="ceilometer-notification-agent" containerID="cri-o://522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0" gracePeriod=30
Sep 29 22:09:32 crc kubenswrapper[4911]: I0929 22:09:32.986738 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="ceilometer-central-agent" containerID="cri-o://0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952" gracePeriod=30
Sep 29 22:09:32 crc kubenswrapper[4911]: I0929 22:09:32.986808 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="proxy-httpd" containerID="cri-o://e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656" gracePeriod=30
Sep 29 22:09:32 crc kubenswrapper[4911]: I0929 22:09:32.986852 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="sg-core" containerID="cri-o://90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715" gracePeriod=30
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.377162 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
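[Editor's note] All four ceilometer-0 containers receive their kill within roughly a millisecond, each with the same pod-level 30-second grace period, which is consistent with the containers of a pod being stopped concurrently and teardown proceeding only after every stop returns. A sketch of that fan-out shape, under that assumption (stopContainer is a stand-in, not a CRI client):

```go
package main

import (
	"fmt"
	"sync"
	"time"
)

// killPodContainers stops every container of a pod in parallel, each with the
// pod's grace period, then waits so sandbox/volume teardown can follow.
func killPodContainers(names []string, grace time.Duration, stopContainer func(name string, grace time.Duration)) {
	var wg sync.WaitGroup
	for _, name := range names {
		wg.Add(1)
		go func(name string) {
			defer wg.Done()
			stopContainer(name, grace)
		}(name)
	}
	wg.Wait()
}

func main() {
	killPodContainers(
		[]string{"ceilometer-central-agent", "ceilometer-notification-agent", "sg-core", "proxy-httpd"},
		30*time.Second,
		func(name string, grace time.Duration) {
			fmt.Printf("Killing container %s gracePeriod=%s\n", name, grace)
		},
	)
}
```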
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.503340 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgt2h\" (UniqueName: \"kubernetes.io/projected/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-api-access-wgt2h\") pod \"9983d5ff-ae31-4562-a659-9acb5742e5e7\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") "
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.503468 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-state-metrics-tls-certs\") pod \"9983d5ff-ae31-4562-a659-9acb5742e5e7\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") "
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.503523 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-combined-ca-bundle\") pod \"9983d5ff-ae31-4562-a659-9acb5742e5e7\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") "
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.503672 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-state-metrics-tls-config\") pod \"9983d5ff-ae31-4562-a659-9acb5742e5e7\" (UID: \"9983d5ff-ae31-4562-a659-9acb5742e5e7\") "
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.523053 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-api-access-wgt2h" (OuterVolumeSpecName: "kube-api-access-wgt2h") pod "9983d5ff-ae31-4562-a659-9acb5742e5e7" (UID: "9983d5ff-ae31-4562-a659-9acb5742e5e7"). InnerVolumeSpecName "kube-api-access-wgt2h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.555355 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9983d5ff-ae31-4562-a659-9acb5742e5e7" (UID: "9983d5ff-ae31-4562-a659-9acb5742e5e7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.558146 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "9983d5ff-ae31-4562-a659-9acb5742e5e7" (UID: "9983d5ff-ae31-4562-a659-9acb5742e5e7"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.572404 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "9983d5ff-ae31-4562-a659-9acb5742e5e7" (UID: "9983d5ff-ae31-4562-a659-9acb5742e5e7"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.606325 4911 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.606362 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.606371 4911 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.606380 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgt2h\" (UniqueName: \"kubernetes.io/projected/9983d5ff-ae31-4562-a659-9acb5742e5e7-kube-api-access-wgt2h\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.726782 4911 generic.go:334] "Generic (PLEG): container finished" podID="9983d5ff-ae31-4562-a659-9acb5742e5e7" containerID="5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3" exitCode=2
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.726850 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9983d5ff-ae31-4562-a659-9acb5742e5e7","Type":"ContainerDied","Data":"5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3"}
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.726909 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9983d5ff-ae31-4562-a659-9acb5742e5e7","Type":"ContainerDied","Data":"4e2d17c5d1b3d6b7c6bc699a53a4bae88a817595e42df75007da0e1fafa2eb59"}
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.726927 4911 scope.go:117] "RemoveContainer" containerID="5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3"
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.727446 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.730042 4911 generic.go:334] "Generic (PLEG): container finished" podID="f523c771-a76c-4854-a62f-85e929e1a24b" containerID="0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952" exitCode=0
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.730074 4911 generic.go:334] "Generic (PLEG): container finished" podID="f523c771-a76c-4854-a62f-85e929e1a24b" containerID="e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656" exitCode=0
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.730088 4911 generic.go:334] "Generic (PLEG): container finished" podID="f523c771-a76c-4854-a62f-85e929e1a24b" containerID="90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715" exitCode=2
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.730108 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f523c771-a76c-4854-a62f-85e929e1a24b","Type":"ContainerDied","Data":"0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952"}
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.730138 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f523c771-a76c-4854-a62f-85e929e1a24b","Type":"ContainerDied","Data":"e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656"}
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.730155 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f523c771-a76c-4854-a62f-85e929e1a24b","Type":"ContainerDied","Data":"90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715"}
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.765949 4911 scope.go:117] "RemoveContainer" containerID="5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3"
Sep 29 22:09:33 crc kubenswrapper[4911]: E0929 22:09:33.767370 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3\": container with ID starting with 5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3 not found: ID does not exist" containerID="5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3"
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.767426 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3"} err="failed to get container status \"5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3\": rpc error: code = NotFound desc = could not find container \"5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3\": container with ID starting with 5a575dcdcebadb59bafff257f7e51e02f8abe8b180b82d01a5f1380797b38cc3 not found: ID does not exist"
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.773499 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 22:09:33 crc kubenswrapper[4911]: I0929 22:09:33.780988 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 22:09:34 crc kubenswrapper[4911]: I0929 22:09:34.718598 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9983d5ff-ae31-4562-a659-9acb5742e5e7" path="/var/lib/kubelet/pods/9983d5ff-ae31-4562-a659-9acb5742e5e7/volumes"
Sep 29 22:09:37 crc kubenswrapper[4911]: I0929 22:09:37.867017 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"]
Sep 29 22:09:37 crc kubenswrapper[4911]: E0929 22:09:37.867634 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ca72ec-4fbd-4367-b5c8-ea180f6fc189" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Sep 29 22:09:37 crc kubenswrapper[4911]: I0929 22:09:37.867647 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ca72ec-4fbd-4367-b5c8-ea180f6fc189" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Sep 29 22:09:37 crc kubenswrapper[4911]: E0929 22:09:37.867666 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="653d35c6-3971-4326-9510-6def96d40bd8" containerName="extract-content"
Sep 29 22:09:37 crc kubenswrapper[4911]: I0929 22:09:37.867672 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="653d35c6-3971-4326-9510-6def96d40bd8" containerName="extract-content"
Sep 29 22:09:37 crc kubenswrapper[4911]: E0929 22:09:37.867685 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="653d35c6-3971-4326-9510-6def96d40bd8" containerName="extract-utilities"
Sep 29 22:09:37 crc kubenswrapper[4911]: I0929 22:09:37.867691 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="653d35c6-3971-4326-9510-6def96d40bd8" containerName="extract-utilities"
Sep 29 22:09:37 crc kubenswrapper[4911]: E0929 22:09:37.867708 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9983d5ff-ae31-4562-a659-9acb5742e5e7" containerName="kube-state-metrics"
Sep 29 22:09:37 crc kubenswrapper[4911]: I0929 22:09:37.867714 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9983d5ff-ae31-4562-a659-9acb5742e5e7" containerName="kube-state-metrics"
Sep 29 22:09:37 crc kubenswrapper[4911]: E0929 22:09:37.867725 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="653d35c6-3971-4326-9510-6def96d40bd8" containerName="registry-server"
Sep 29 22:09:37 crc kubenswrapper[4911]: I0929 22:09:37.867731 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="653d35c6-3971-4326-9510-6def96d40bd8" containerName="registry-server"
Sep 29 22:09:37 crc kubenswrapper[4911]: I0929 22:09:37.867907 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="653d35c6-3971-4326-9510-6def96d40bd8" containerName="registry-server"
Sep 29 22:09:37 crc kubenswrapper[4911]: I0929 22:09:37.867922 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9983d5ff-ae31-4562-a659-9acb5742e5e7" containerName="kube-state-metrics"
Sep 29 22:09:37 crc kubenswrapper[4911]: I0929 22:09:37.867947 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ca72ec-4fbd-4367-b5c8-ea180f6fc189" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Sep 29 22:09:37 crc kubenswrapper[4911]: I0929 22:09:37.869218 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"
Sep 29 22:09:37 crc kubenswrapper[4911]: I0929 22:09:37.874948 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Sep 29 22:09:37 crc kubenswrapper[4911]: I0929 22:09:37.884859 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"]
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.010651 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v22b7\" (UniqueName: \"kubernetes.io/projected/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-kube-api-access-v22b7\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp\" (UID: \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.011031 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-bundle\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp\" (UID: \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.011213 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-util\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp\" (UID: \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.113077 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-bundle\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp\" (UID: \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.113393 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-util\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp\" (UID: \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.113497 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v22b7\" (UniqueName: \"kubernetes.io/projected/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-kube-api-access-v22b7\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp\" (UID: \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.113836 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-bundle\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp\" (UID: \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.114286 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-util\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp\" (UID: \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.154965 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v22b7\" (UniqueName: \"kubernetes.io/projected/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-kube-api-access-v22b7\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp\" (UID: \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.207054 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.485989 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.626549 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-sg-core-conf-yaml\") pod \"f523c771-a76c-4854-a62f-85e929e1a24b\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") "
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.626781 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-combined-ca-bundle\") pod \"f523c771-a76c-4854-a62f-85e929e1a24b\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") "
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.626876 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hf94t\" (UniqueName: \"kubernetes.io/projected/f523c771-a76c-4854-a62f-85e929e1a24b-kube-api-access-hf94t\") pod \"f523c771-a76c-4854-a62f-85e929e1a24b\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") "
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.626926 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f523c771-a76c-4854-a62f-85e929e1a24b-log-httpd\") pod \"f523c771-a76c-4854-a62f-85e929e1a24b\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") "
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.626966 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-ceilometer-tls-certs\") pod \"f523c771-a76c-4854-a62f-85e929e1a24b\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") "
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.627190 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f523c771-a76c-4854-a62f-85e929e1a24b-run-httpd\") pod \"f523c771-a76c-4854-a62f-85e929e1a24b\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") "
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.627258 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-config-data\") pod \"f523c771-a76c-4854-a62f-85e929e1a24b\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") "
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.627279 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-scripts\") pod \"f523c771-a76c-4854-a62f-85e929e1a24b\" (UID: \"f523c771-a76c-4854-a62f-85e929e1a24b\") "
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.627544 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f523c771-a76c-4854-a62f-85e929e1a24b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f523c771-a76c-4854-a62f-85e929e1a24b" (UID: "f523c771-a76c-4854-a62f-85e929e1a24b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.627667 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f523c771-a76c-4854-a62f-85e929e1a24b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f523c771-a76c-4854-a62f-85e929e1a24b" (UID: "f523c771-a76c-4854-a62f-85e929e1a24b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.628359 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f523c771-a76c-4854-a62f-85e929e1a24b-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.628382 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f523c771-a76c-4854-a62f-85e929e1a24b-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.631424 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f523c771-a76c-4854-a62f-85e929e1a24b-kube-api-access-hf94t" (OuterVolumeSpecName: "kube-api-access-hf94t") pod "f523c771-a76c-4854-a62f-85e929e1a24b" (UID: "f523c771-a76c-4854-a62f-85e929e1a24b"). InnerVolumeSpecName "kube-api-access-hf94t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.631759 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-scripts" (OuterVolumeSpecName: "scripts") pod "f523c771-a76c-4854-a62f-85e929e1a24b" (UID: "f523c771-a76c-4854-a62f-85e929e1a24b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.652764 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f523c771-a76c-4854-a62f-85e929e1a24b" (UID: "f523c771-a76c-4854-a62f-85e929e1a24b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.675727 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "f523c771-a76c-4854-a62f-85e929e1a24b" (UID: "f523c771-a76c-4854-a62f-85e929e1a24b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.690759 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f523c771-a76c-4854-a62f-85e929e1a24b" (UID: "f523c771-a76c-4854-a62f-85e929e1a24b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.729643 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.729669 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.729678 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hf94t\" (UniqueName: \"kubernetes.io/projected/f523c771-a76c-4854-a62f-85e929e1a24b-kube-api-access-hf94t\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.729689 4911 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.729697 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.730867 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-config-data" (OuterVolumeSpecName: "config-data") pod "f523c771-a76c-4854-a62f-85e929e1a24b" (UID: "f523c771-a76c-4854-a62f-85e929e1a24b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.791095 4911 generic.go:334] "Generic (PLEG): container finished" podID="f523c771-a76c-4854-a62f-85e929e1a24b" containerID="522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0" exitCode=0
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.791147 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f523c771-a76c-4854-a62f-85e929e1a24b","Type":"ContainerDied","Data":"522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0"}
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.791176 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f523c771-a76c-4854-a62f-85e929e1a24b","Type":"ContainerDied","Data":"5b765ea2e53b75bce1124d4de7943105c74eda1d5528d7716858d5dfb32b6222"}
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.791199 4911 scope.go:117] "RemoveContainer" containerID="0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.791207 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.819840 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp"]
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.831134 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.831621 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f523c771-a76c-4854-a62f-85e929e1a24b-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.843522 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.845269 4911 scope.go:117] "RemoveContainer" containerID="e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.872756 4911 scope.go:117] "RemoveContainer" containerID="90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.889932 4911 scope.go:117] "RemoveContainer" containerID="522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.918294 4911 scope.go:117] "RemoveContainer" containerID="0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952"
Sep 29 22:09:38 crc kubenswrapper[4911]: E0929 22:09:38.918731 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952\": container with ID starting with 0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952 not found: ID does not exist" containerID="0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952"
Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.918761 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952"} err="failed to get container status \"0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952\": rpc error: code
= NotFound desc = could not find container \"0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952\": container with ID starting with 0998db1c2481e429d6d3cf8cf9810741850cbc9e998af86ada61bcdb74e88952 not found: ID does not exist" Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.918781 4911 scope.go:117] "RemoveContainer" containerID="e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656" Sep 29 22:09:38 crc kubenswrapper[4911]: E0929 22:09:38.919166 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656\": container with ID starting with e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656 not found: ID does not exist" containerID="e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656" Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.919182 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656"} err="failed to get container status \"e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656\": rpc error: code = NotFound desc = could not find container \"e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656\": container with ID starting with e6b054ee7cb44cc6ca8c99c0dd03f90a290454ed24e04ef5ec6ed6badde62656 not found: ID does not exist" Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.919195 4911 scope.go:117] "RemoveContainer" containerID="90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715" Sep 29 22:09:38 crc kubenswrapper[4911]: E0929 22:09:38.919420 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715\": container with ID starting with 90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715 not found: ID does not exist" containerID="90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715" Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.919437 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715"} err="failed to get container status \"90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715\": rpc error: code = NotFound desc = could not find container \"90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715\": container with ID starting with 90e66e9c61203eac6ff6251eca642f545676666c48546e22f0fe2ec932b12715 not found: ID does not exist" Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.919449 4911 scope.go:117] "RemoveContainer" containerID="522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0" Sep 29 22:09:38 crc kubenswrapper[4911]: E0929 22:09:38.919721 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0\": container with ID starting with 522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0 not found: ID does not exist" containerID="522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0" Sep 29 22:09:38 crc kubenswrapper[4911]: I0929 22:09:38.919736 4911 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0"} err="failed to get container status \"522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0\": rpc error: code = NotFound desc = could not find container \"522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0\": container with ID starting with 522d39b33961c43600a768ab5e6c9aa1c0920cde5a1eb5291326ffb5ccf952d0 not found: ID does not exist" Sep 29 22:09:39 crc kubenswrapper[4911]: I0929 22:09:39.805818 4911 generic.go:334] "Generic (PLEG): container finished" podID="135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" containerID="2124b2c49295ccfb622f8e6bc2747567b9b9543a2ac078c889112eb1cf836ab6" exitCode=0 Sep 29 22:09:39 crc kubenswrapper[4911]: I0929 22:09:39.805958 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp" event={"ID":"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3","Type":"ContainerDied","Data":"2124b2c49295ccfb622f8e6bc2747567b9b9543a2ac078c889112eb1cf836ab6"} Sep 29 22:09:39 crc kubenswrapper[4911]: I0929 22:09:39.806216 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp" event={"ID":"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3","Type":"ContainerStarted","Data":"df15704df2ee40e44e59f791508dc41b65979293eb3ed47ad16ffc93980c9f09"} Sep 29 22:09:40 crc kubenswrapper[4911]: I0929 22:09:40.721223 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" path="/var/lib/kubelet/pods/f523c771-a76c-4854-a62f-85e929e1a24b/volumes" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.455359 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj"] Sep 29 22:09:42 crc kubenswrapper[4911]: E0929 22:09:42.456131 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="ceilometer-notification-agent" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.456149 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="ceilometer-notification-agent" Sep 29 22:09:42 crc kubenswrapper[4911]: E0929 22:09:42.456175 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="proxy-httpd" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.456183 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="proxy-httpd" Sep 29 22:09:42 crc kubenswrapper[4911]: E0929 22:09:42.456195 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="sg-core" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.456202 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="sg-core" Sep 29 22:09:42 crc kubenswrapper[4911]: E0929 22:09:42.456225 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="ceilometer-central-agent" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.456233 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="ceilometer-central-agent" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 
22:09:42.456425 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="ceilometer-central-agent" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.456446 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="ceilometer-notification-agent" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.456475 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="proxy-httpd" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.456486 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f523c771-a76c-4854-a62f-85e929e1a24b" containerName="sg-core" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.457927 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.510141 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj"] Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.601422 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aa9a2254-fecc-4d77-bd00-c665acb7efe2-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj\" (UID: \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.601495 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b5b4\" (UniqueName: \"kubernetes.io/projected/aa9a2254-fecc-4d77-bd00-c665acb7efe2-kube-api-access-2b5b4\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj\" (UID: \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.601684 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aa9a2254-fecc-4d77-bd00-c665acb7efe2-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj\" (UID: \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.703579 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aa9a2254-fecc-4d77-bd00-c665acb7efe2-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj\" (UID: \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.703626 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aa9a2254-fecc-4d77-bd00-c665acb7efe2-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj\" (UID: \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\") " 
pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.703673 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b5b4\" (UniqueName: \"kubernetes.io/projected/aa9a2254-fecc-4d77-bd00-c665acb7efe2-kube-api-access-2b5b4\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj\" (UID: \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.704199 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aa9a2254-fecc-4d77-bd00-c665acb7efe2-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj\" (UID: \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.704248 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aa9a2254-fecc-4d77-bd00-c665acb7efe2-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj\" (UID: \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.726998 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b5b4\" (UniqueName: \"kubernetes.io/projected/aa9a2254-fecc-4d77-bd00-c665acb7efe2-kube-api-access-2b5b4\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj\" (UID: \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.778189 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.851586 4911 generic.go:334] "Generic (PLEG): container finished" podID="135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" containerID="69335cc647c91f864285474b4d28d4906843dd6b08fdac8bcbaa415437ecf173" exitCode=0 Sep 29 22:09:42 crc kubenswrapper[4911]: I0929 22:09:42.851633 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp" event={"ID":"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3","Type":"ContainerDied","Data":"69335cc647c91f864285474b4d28d4906843dd6b08fdac8bcbaa415437ecf173"} Sep 29 22:09:43 crc kubenswrapper[4911]: W0929 22:09:43.292079 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa9a2254_fecc_4d77_bd00_c665acb7efe2.slice/crio-9fb4ac9360f62967e8521ddb3ad1434f5f2d2e7b0445a2b4b12d48edc337e50a WatchSource:0}: Error finding container 9fb4ac9360f62967e8521ddb3ad1434f5f2d2e7b0445a2b4b12d48edc337e50a: Status 404 returned error can't find the container with id 9fb4ac9360f62967e8521ddb3ad1434f5f2d2e7b0445a2b4b12d48edc337e50a Sep 29 22:09:43 crc kubenswrapper[4911]: I0929 22:09:43.299129 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj"] Sep 29 22:09:43 crc kubenswrapper[4911]: I0929 22:09:43.860591 4911 generic.go:334] "Generic (PLEG): container finished" podID="aa9a2254-fecc-4d77-bd00-c665acb7efe2" containerID="5d36173a758267426d5994346c649ae2e364cb096df1c04955081098d9e22493" exitCode=0 Sep 29 22:09:43 crc kubenswrapper[4911]: I0929 22:09:43.861092 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" event={"ID":"aa9a2254-fecc-4d77-bd00-c665acb7efe2","Type":"ContainerDied","Data":"5d36173a758267426d5994346c649ae2e364cb096df1c04955081098d9e22493"} Sep 29 22:09:43 crc kubenswrapper[4911]: I0929 22:09:43.861144 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" event={"ID":"aa9a2254-fecc-4d77-bd00-c665acb7efe2","Type":"ContainerStarted","Data":"9fb4ac9360f62967e8521ddb3ad1434f5f2d2e7b0445a2b4b12d48edc337e50a"} Sep 29 22:09:43 crc kubenswrapper[4911]: I0929 22:09:43.864003 4911 generic.go:334] "Generic (PLEG): container finished" podID="135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" containerID="fe6ee4e001105668e7890601b1818fb0afbc87338a2cd25a79b0b74cd9395d5e" exitCode=0 Sep 29 22:09:43 crc kubenswrapper[4911]: I0929 22:09:43.864053 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp" event={"ID":"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3","Type":"ContainerDied","Data":"fe6ee4e001105668e7890601b1818fb0afbc87338a2cd25a79b0b74cd9395d5e"} Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.314014 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp" Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.452558 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v22b7\" (UniqueName: \"kubernetes.io/projected/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-kube-api-access-v22b7\") pod \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\" (UID: \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\") " Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.452689 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-util\") pod \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\" (UID: \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\") " Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.452755 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-bundle\") pod \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\" (UID: \"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3\") " Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.454687 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-bundle" (OuterVolumeSpecName: "bundle") pod "135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" (UID: "135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.459903 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-kube-api-access-v22b7" (OuterVolumeSpecName: "kube-api-access-v22b7") pod "135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" (UID: "135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3"). InnerVolumeSpecName "kube-api-access-v22b7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.481200 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-util" (OuterVolumeSpecName: "util") pod "135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" (UID: "135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.554936 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v22b7\" (UniqueName: \"kubernetes.io/projected/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-kube-api-access-v22b7\") on node \"crc\" DevicePath \"\"" Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.554990 4911 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-util\") on node \"crc\" DevicePath \"\"" Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.555010 4911 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.886706 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp" event={"ID":"135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3","Type":"ContainerDied","Data":"df15704df2ee40e44e59f791508dc41b65979293eb3ed47ad16ffc93980c9f09"} Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.887026 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df15704df2ee40e44e59f791508dc41b65979293eb3ed47ad16ffc93980c9f09" Sep 29 22:09:45 crc kubenswrapper[4911]: I0929 22:09:45.886763 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp" Sep 29 22:09:46 crc kubenswrapper[4911]: I0929 22:09:46.899966 4911 generic.go:334] "Generic (PLEG): container finished" podID="aa9a2254-fecc-4d77-bd00-c665acb7efe2" containerID="804b20c4a3866f613d15ff0625a37aadf5f6478d7aaa062d0448393f0c74dc53" exitCode=0 Sep 29 22:09:46 crc kubenswrapper[4911]: I0929 22:09:46.900034 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" event={"ID":"aa9a2254-fecc-4d77-bd00-c665acb7efe2","Type":"ContainerDied","Data":"804b20c4a3866f613d15ff0625a37aadf5f6478d7aaa062d0448393f0c74dc53"} Sep 29 22:09:47 crc kubenswrapper[4911]: I0929 22:09:47.913528 4911 generic.go:334] "Generic (PLEG): container finished" podID="aa9a2254-fecc-4d77-bd00-c665acb7efe2" containerID="a9e9ccdf48f9904aa383407280f90b2d582784388c457199ecd25e47a1e3f491" exitCode=0 Sep 29 22:09:47 crc kubenswrapper[4911]: I0929 22:09:47.913628 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" event={"ID":"aa9a2254-fecc-4d77-bd00-c665acb7efe2","Type":"ContainerDied","Data":"a9e9ccdf48f9904aa383407280f90b2d582784388c457199ecd25e47a1e3f491"} Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.281151 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.318507 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aa9a2254-fecc-4d77-bd00-c665acb7efe2-bundle\") pod \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\" (UID: \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\") " Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.318599 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aa9a2254-fecc-4d77-bd00-c665acb7efe2-util\") pod \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\" (UID: \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\") " Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.318811 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b5b4\" (UniqueName: \"kubernetes.io/projected/aa9a2254-fecc-4d77-bd00-c665acb7efe2-kube-api-access-2b5b4\") pod \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\" (UID: \"aa9a2254-fecc-4d77-bd00-c665acb7efe2\") " Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.327040 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa9a2254-fecc-4d77-bd00-c665acb7efe2-kube-api-access-2b5b4" (OuterVolumeSpecName: "kube-api-access-2b5b4") pod "aa9a2254-fecc-4d77-bd00-c665acb7efe2" (UID: "aa9a2254-fecc-4d77-bd00-c665acb7efe2"). InnerVolumeSpecName "kube-api-access-2b5b4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.340194 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa9a2254-fecc-4d77-bd00-c665acb7efe2-bundle" (OuterVolumeSpecName: "bundle") pod "aa9a2254-fecc-4d77-bd00-c665acb7efe2" (UID: "aa9a2254-fecc-4d77-bd00-c665acb7efe2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.347364 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa9a2254-fecc-4d77-bd00-c665acb7efe2-util" (OuterVolumeSpecName: "util") pod "aa9a2254-fecc-4d77-bd00-c665acb7efe2" (UID: "aa9a2254-fecc-4d77-bd00-c665acb7efe2"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.421227 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b5b4\" (UniqueName: \"kubernetes.io/projected/aa9a2254-fecc-4d77-bd00-c665acb7efe2-kube-api-access-2b5b4\") on node \"crc\" DevicePath \"\"" Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.421253 4911 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aa9a2254-fecc-4d77-bd00-c665acb7efe2-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.421263 4911 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aa9a2254-fecc-4d77-bd00-c665acb7efe2-util\") on node \"crc\" DevicePath \"\"" Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.939443 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" event={"ID":"aa9a2254-fecc-4d77-bd00-c665acb7efe2","Type":"ContainerDied","Data":"9fb4ac9360f62967e8521ddb3ad1434f5f2d2e7b0445a2b4b12d48edc337e50a"} Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.939489 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9fb4ac9360f62967e8521ddb3ad1434f5f2d2e7b0445a2b4b12d48edc337e50a" Sep 29 22:09:49 crc kubenswrapper[4911]: I0929 22:09:49.939551 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.358109 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nc626/must-gather-v6s7x"] Sep 29 22:09:53 crc kubenswrapper[4911]: E0929 22:09:53.358847 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" containerName="pull" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.358861 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" containerName="pull" Sep 29 22:09:53 crc kubenswrapper[4911]: E0929 22:09:53.358878 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" containerName="util" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.358884 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" containerName="util" Sep 29 22:09:53 crc kubenswrapper[4911]: E0929 22:09:53.358898 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa9a2254-fecc-4d77-bd00-c665acb7efe2" containerName="pull" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.358904 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa9a2254-fecc-4d77-bd00-c665acb7efe2" containerName="pull" Sep 29 22:09:53 crc kubenswrapper[4911]: E0929 22:09:53.358924 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa9a2254-fecc-4d77-bd00-c665acb7efe2" containerName="extract" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.358929 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa9a2254-fecc-4d77-bd00-c665acb7efe2" containerName="extract" Sep 29 22:09:53 crc kubenswrapper[4911]: E0929 22:09:53.358945 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" containerName="extract" Sep 29 22:09:53 crc 
kubenswrapper[4911]: I0929 22:09:53.358951 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" containerName="extract" Sep 29 22:09:53 crc kubenswrapper[4911]: E0929 22:09:53.358960 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa9a2254-fecc-4d77-bd00-c665acb7efe2" containerName="util" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.358966 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa9a2254-fecc-4d77-bd00-c665acb7efe2" containerName="util" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.359144 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa9a2254-fecc-4d77-bd00-c665acb7efe2" containerName="extract" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.359167 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3" containerName="extract" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.360117 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nc626/must-gather-v6s7x" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.371326 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-nc626"/"openshift-service-ca.crt" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.371570 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-nc626"/"default-dockercfg-hln2p" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.371717 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-nc626"/"kube-root-ca.crt" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.394312 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-nc626/must-gather-v6s7x"] Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.500511 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/59852fea-b786-4bdc-9542-693035a9a063-must-gather-output\") pod \"must-gather-v6s7x\" (UID: \"59852fea-b786-4bdc-9542-693035a9a063\") " pod="openshift-must-gather-nc626/must-gather-v6s7x" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.500899 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pdkk\" (UniqueName: \"kubernetes.io/projected/59852fea-b786-4bdc-9542-693035a9a063-kube-api-access-8pdkk\") pod \"must-gather-v6s7x\" (UID: \"59852fea-b786-4bdc-9542-693035a9a063\") " pod="openshift-must-gather-nc626/must-gather-v6s7x" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.602364 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/59852fea-b786-4bdc-9542-693035a9a063-must-gather-output\") pod \"must-gather-v6s7x\" (UID: \"59852fea-b786-4bdc-9542-693035a9a063\") " pod="openshift-must-gather-nc626/must-gather-v6s7x" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.602410 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pdkk\" (UniqueName: \"kubernetes.io/projected/59852fea-b786-4bdc-9542-693035a9a063-kube-api-access-8pdkk\") pod \"must-gather-v6s7x\" (UID: \"59852fea-b786-4bdc-9542-693035a9a063\") " pod="openshift-must-gather-nc626/must-gather-v6s7x" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.602892 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/59852fea-b786-4bdc-9542-693035a9a063-must-gather-output\") pod \"must-gather-v6s7x\" (UID: \"59852fea-b786-4bdc-9542-693035a9a063\") " pod="openshift-must-gather-nc626/must-gather-v6s7x" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.636415 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pdkk\" (UniqueName: \"kubernetes.io/projected/59852fea-b786-4bdc-9542-693035a9a063-kube-api-access-8pdkk\") pod \"must-gather-v6s7x\" (UID: \"59852fea-b786-4bdc-9542-693035a9a063\") " pod="openshift-must-gather-nc626/must-gather-v6s7x" Sep 29 22:09:53 crc kubenswrapper[4911]: I0929 22:09:53.720723 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nc626/must-gather-v6s7x" Sep 29 22:09:54 crc kubenswrapper[4911]: I0929 22:09:54.129391 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-nc626/must-gather-v6s7x"] Sep 29 22:09:55 crc kubenswrapper[4911]: I0929 22:09:55.003726 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/must-gather-v6s7x" event={"ID":"59852fea-b786-4bdc-9542-693035a9a063","Type":"ContainerStarted","Data":"3234cfdbb48dd4706382c0b080bd1af1b07db6501a2db7304f04798ee0f3dcb0"} Sep 29 22:09:55 crc kubenswrapper[4911]: I0929 22:09:55.210644 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:09:55 crc kubenswrapper[4911]: I0929 22:09:55.210707 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:09:55 crc kubenswrapper[4911]: I0929 22:09:55.210756 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w647f" Sep 29 22:09:55 crc kubenswrapper[4911]: I0929 22:09:55.211527 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7be79346af5955e6a8de71a5a3427d7157502674786602aada446da1543ebd80"} pod="openshift-machine-config-operator/machine-config-daemon-w647f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 22:09:55 crc kubenswrapper[4911]: I0929 22:09:55.211597 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" containerID="cri-o://7be79346af5955e6a8de71a5a3427d7157502674786602aada446da1543ebd80" gracePeriod=600 Sep 29 22:09:56 crc kubenswrapper[4911]: I0929 22:09:56.020201 4911 generic.go:334] "Generic (PLEG): container finished" podID="50640abc-40db-4390-82d1-f3cfc76da71c" containerID="7be79346af5955e6a8de71a5a3427d7157502674786602aada446da1543ebd80" exitCode=0 Sep 29 22:09:56 crc kubenswrapper[4911]: I0929 22:09:56.020293 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerDied","Data":"7be79346af5955e6a8de71a5a3427d7157502674786602aada446da1543ebd80"} Sep 29 22:09:56 crc kubenswrapper[4911]: I0929 22:09:56.020630 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"} Sep 29 22:09:56 crc kubenswrapper[4911]: I0929 22:09:56.020646 4911 scope.go:117] "RemoveContainer" containerID="36e2ce9b1f454aea68a43b1961f21ef4bddb0194f435c478ced137f34d81bd50" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.081965 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/must-gather-v6s7x" event={"ID":"59852fea-b786-4bdc-9542-693035a9a063","Type":"ContainerStarted","Data":"e2996291d7122c0af470e4007c40beed48ed2660789cdef200bacb4207106cbb"} Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.700982 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-vg7l9"] Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.702352 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vg7l9" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.710628 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.710732 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.710837 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-wjjct" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.726146 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-vg7l9"] Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.756499 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdzhf\" (UniqueName: \"kubernetes.io/projected/fec367b2-98aa-4597-b64f-b5bdc79b0663-kube-api-access-kdzhf\") pod \"obo-prometheus-operator-7c8cf85677-vg7l9\" (UID: \"fec367b2-98aa-4597-b64f-b5bdc79b0663\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vg7l9" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.838251 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm"] Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.839567 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.841300 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.841604 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-gv57t" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.858268 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ac733ce2-123b-4300-ae15-adffd62f927a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm\" (UID: \"ac733ce2-123b-4300-ae15-adffd62f927a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.858328 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ac733ce2-123b-4300-ae15-adffd62f927a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm\" (UID: \"ac733ce2-123b-4300-ae15-adffd62f927a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.858367 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdzhf\" (UniqueName: \"kubernetes.io/projected/fec367b2-98aa-4597-b64f-b5bdc79b0663-kube-api-access-kdzhf\") pod \"obo-prometheus-operator-7c8cf85677-vg7l9\" (UID: \"fec367b2-98aa-4597-b64f-b5bdc79b0663\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vg7l9" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.862839 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm"] Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.870894 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l"] Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.872177 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.882834 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l"] Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.899485 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdzhf\" (UniqueName: \"kubernetes.io/projected/fec367b2-98aa-4597-b64f-b5bdc79b0663-kube-api-access-kdzhf\") pod \"obo-prometheus-operator-7c8cf85677-vg7l9\" (UID: \"fec367b2-98aa-4597-b64f-b5bdc79b0663\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vg7l9" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.959498 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/84376021-7e21-4053-ae59-3665a4c9c507-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l\" (UID: \"84376021-7e21-4053-ae59-3665a4c9c507\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.959588 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ac733ce2-123b-4300-ae15-adffd62f927a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm\" (UID: \"ac733ce2-123b-4300-ae15-adffd62f927a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.959619 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ac733ce2-123b-4300-ae15-adffd62f927a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm\" (UID: \"ac733ce2-123b-4300-ae15-adffd62f927a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.959645 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/84376021-7e21-4053-ae59-3665a4c9c507-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l\" (UID: \"84376021-7e21-4053-ae59-3665a4c9c507\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.963538 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ac733ce2-123b-4300-ae15-adffd62f927a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm\" (UID: \"ac733ce2-123b-4300-ae15-adffd62f927a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm" Sep 29 22:10:01 crc kubenswrapper[4911]: I0929 22:10:01.977221 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ac733ce2-123b-4300-ae15-adffd62f927a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm\" (UID: \"ac733ce2-123b-4300-ae15-adffd62f927a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm" Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 
22:10:02.034182 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vg7l9" Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.045433 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-75pqc"] Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.046650 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-75pqc" Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.048430 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-xglxv" Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.048637 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.060803 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/84376021-7e21-4053-ae59-3665a4c9c507-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l\" (UID: \"84376021-7e21-4053-ae59-3665a4c9c507\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l" Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.060914 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/84376021-7e21-4053-ae59-3665a4c9c507-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l\" (UID: \"84376021-7e21-4053-ae59-3665a4c9c507\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l" Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.077459 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/84376021-7e21-4053-ae59-3665a4c9c507-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l\" (UID: \"84376021-7e21-4053-ae59-3665a4c9c507\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l" Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.084308 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/84376021-7e21-4053-ae59-3665a4c9c507-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l\" (UID: \"84376021-7e21-4053-ae59-3665a4c9c507\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l" Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.085649 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-75pqc"] Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.105060 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/must-gather-v6s7x" event={"ID":"59852fea-b786-4bdc-9542-693035a9a063","Type":"ContainerStarted","Data":"3c0385b583e21d41359438d700f89d3311f20c42470ccbdbf89154e491bb12cb"} Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.133928 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-nc626/must-gather-v6s7x" podStartSLOduration=2.57380326 podStartE2EDuration="9.133905415s" podCreationTimestamp="2025-09-29 22:09:53 +0000 UTC" 
firstStartedPulling="2025-09-29 22:09:54.159743907 +0000 UTC m=+2672.136856578" lastFinishedPulling="2025-09-29 22:10:00.719846062 +0000 UTC m=+2678.696958733" observedRunningTime="2025-09-29 22:10:02.127372161 +0000 UTC m=+2680.104484832" watchObservedRunningTime="2025-09-29 22:10:02.133905415 +0000 UTC m=+2680.111018086"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.162906 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j65p7\" (UniqueName: \"kubernetes.io/projected/31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12-kube-api-access-j65p7\") pod \"observability-operator-cc5f78dfc-75pqc\" (UID: \"31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12\") " pod="openshift-operators/observability-operator-cc5f78dfc-75pqc"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.162961 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-75pqc\" (UID: \"31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12\") " pod="openshift-operators/observability-operator-cc5f78dfc-75pqc"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.170225 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.204851 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-xvj9h"]
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.206369 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.236475 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-wg2wf"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.240718 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.274000 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-xvj9h"]
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.275278 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j65p7\" (UniqueName: \"kubernetes.io/projected/31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12-kube-api-access-j65p7\") pod \"observability-operator-cc5f78dfc-75pqc\" (UID: \"31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12\") " pod="openshift-operators/observability-operator-cc5f78dfc-75pqc"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.275356 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-75pqc\" (UID: \"31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12\") " pod="openshift-operators/observability-operator-cc5f78dfc-75pqc"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.282744 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-75pqc\" (UID: \"31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12\") " pod="openshift-operators/observability-operator-cc5f78dfc-75pqc"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.322177 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j65p7\" (UniqueName: \"kubernetes.io/projected/31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12-kube-api-access-j65p7\") pod \"observability-operator-cc5f78dfc-75pqc\" (UID: \"31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12\") " pod="openshift-operators/observability-operator-cc5f78dfc-75pqc"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.380836 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t58qk\" (UniqueName: \"kubernetes.io/projected/e8f2e1f7-e311-4126-853a-a85eac6e689c-kube-api-access-t58qk\") pod \"perses-operator-54bc95c9fb-xvj9h\" (UID: \"e8f2e1f7-e311-4126-853a-a85eac6e689c\") " pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.380964 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/e8f2e1f7-e311-4126-853a-a85eac6e689c-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-xvj9h\" (UID: \"e8f2e1f7-e311-4126-853a-a85eac6e689c\") " pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.482092 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-75pqc"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.483522 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/e8f2e1f7-e311-4126-853a-a85eac6e689c-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-xvj9h\" (UID: \"e8f2e1f7-e311-4126-853a-a85eac6e689c\") " pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.483596 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t58qk\" (UniqueName: \"kubernetes.io/projected/e8f2e1f7-e311-4126-853a-a85eac6e689c-kube-api-access-t58qk\") pod \"perses-operator-54bc95c9fb-xvj9h\" (UID: \"e8f2e1f7-e311-4126-853a-a85eac6e689c\") " pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.484500 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/e8f2e1f7-e311-4126-853a-a85eac6e689c-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-xvj9h\" (UID: \"e8f2e1f7-e311-4126-853a-a85eac6e689c\") " pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.519709 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t58qk\" (UniqueName: \"kubernetes.io/projected/e8f2e1f7-e311-4126-853a-a85eac6e689c-kube-api-access-t58qk\") pod \"perses-operator-54bc95c9fb-xvj9h\" (UID: \"e8f2e1f7-e311-4126-853a-a85eac6e689c\") " pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.584018 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h"
Sep 29 22:10:02 crc kubenswrapper[4911]: I0929 22:10:02.758046 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-vg7l9"]
Sep 29 22:10:03 crc kubenswrapper[4911]: I0929 22:10:03.082817 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm"]
Sep 29 22:10:03 crc kubenswrapper[4911]: W0929 22:10:03.092220 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac733ce2_123b_4300_ae15_adffd62f927a.slice/crio-1172b6bda66dd26ba66dfddf80943126e791206332783e969f7a0d2d750f4746 WatchSource:0}: Error finding container 1172b6bda66dd26ba66dfddf80943126e791206332783e969f7a0d2d750f4746: Status 404 returned error can't find the container with id 1172b6bda66dd26ba66dfddf80943126e791206332783e969f7a0d2d750f4746
Sep 29 22:10:03 crc kubenswrapper[4911]: I0929 22:10:03.137620 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm" event={"ID":"ac733ce2-123b-4300-ae15-adffd62f927a","Type":"ContainerStarted","Data":"1172b6bda66dd26ba66dfddf80943126e791206332783e969f7a0d2d750f4746"}
Sep 29 22:10:03 crc kubenswrapper[4911]: I0929 22:10:03.149546 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vg7l9" event={"ID":"fec367b2-98aa-4597-b64f-b5bdc79b0663","Type":"ContainerStarted","Data":"ee9e90d5eea2f9dc066cf29ed7b49c92565e6dc4ac2b30cd98d72dbd8ff5235b"}
Sep 29 22:10:03 crc kubenswrapper[4911]: I0929 22:10:03.221597 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l"]
Sep 29 22:10:03 crc kubenswrapper[4911]: W0929 22:10:03.235687 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84376021_7e21_4053_ae59_3665a4c9c507.slice/crio-4601558ab5e53317d2fcac63e18a1af6494ddf1bfdd87fdfee96665a2795fd33 WatchSource:0}: Error finding container 4601558ab5e53317d2fcac63e18a1af6494ddf1bfdd87fdfee96665a2795fd33: Status 404 returned error can't find the container with id 4601558ab5e53317d2fcac63e18a1af6494ddf1bfdd87fdfee96665a2795fd33
Sep 29 22:10:03 crc kubenswrapper[4911]: I0929 22:10:03.275268 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-75pqc"]
Sep 29 22:10:03 crc kubenswrapper[4911]: I0929 22:10:03.348774 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-xvj9h"]
Sep 29 22:10:03 crc kubenswrapper[4911]: W0929 22:10:03.352448 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8f2e1f7_e311_4126_853a_a85eac6e689c.slice/crio-a36d56bfac62aaacac4b5c1fbc27dd3d75b91dbceeec8bd82415a69fbe4aa1b9 WatchSource:0}: Error finding container a36d56bfac62aaacac4b5c1fbc27dd3d75b91dbceeec8bd82415a69fbe4aa1b9: Status 404 returned error can't find the container with id a36d56bfac62aaacac4b5c1fbc27dd3d75b91dbceeec8bd82415a69fbe4aa1b9
Sep 29 22:10:04 crc kubenswrapper[4911]: I0929 22:10:04.158838 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h" event={"ID":"e8f2e1f7-e311-4126-853a-a85eac6e689c","Type":"ContainerStarted","Data":"a36d56bfac62aaacac4b5c1fbc27dd3d75b91dbceeec8bd82415a69fbe4aa1b9"}
Sep 29 22:10:04 crc kubenswrapper[4911]: I0929 22:10:04.160597 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l" event={"ID":"84376021-7e21-4053-ae59-3665a4c9c507","Type":"ContainerStarted","Data":"4601558ab5e53317d2fcac63e18a1af6494ddf1bfdd87fdfee96665a2795fd33"}
Sep 29 22:10:04 crc kubenswrapper[4911]: I0929 22:10:04.162051 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-75pqc" event={"ID":"31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12","Type":"ContainerStarted","Data":"1bd8ec40eafeec9174cc545f98f554b1d77cb0499e3313ea4c3d4ac20768c915"}
Sep 29 22:10:08 crc kubenswrapper[4911]: E0929 22:10:08.912005 4911 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.251:34846->38.102.83.251:46765: write tcp 38.102.83.251:34846->38.102.83.251:46765: write: broken pipe
Sep 29 22:10:10 crc kubenswrapper[4911]: I0929 22:10:10.020964 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nc626/crc-debug-x8dwv"]
Sep 29 22:10:10 crc kubenswrapper[4911]: I0929 22:10:10.022427 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nc626/crc-debug-x8dwv"
Sep 29 22:10:10 crc kubenswrapper[4911]: I0929 22:10:10.119770 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d204224-a175-4357-9642-e09a9f8cafbc-host\") pod \"crc-debug-x8dwv\" (UID: \"6d204224-a175-4357-9642-e09a9f8cafbc\") " pod="openshift-must-gather-nc626/crc-debug-x8dwv"
Sep 29 22:10:10 crc kubenswrapper[4911]: I0929 22:10:10.119852 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7ff6\" (UniqueName: \"kubernetes.io/projected/6d204224-a175-4357-9642-e09a9f8cafbc-kube-api-access-p7ff6\") pod \"crc-debug-x8dwv\" (UID: \"6d204224-a175-4357-9642-e09a9f8cafbc\") " pod="openshift-must-gather-nc626/crc-debug-x8dwv"
Sep 29 22:10:10 crc kubenswrapper[4911]: I0929 22:10:10.221873 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d204224-a175-4357-9642-e09a9f8cafbc-host\") pod \"crc-debug-x8dwv\" (UID: \"6d204224-a175-4357-9642-e09a9f8cafbc\") " pod="openshift-must-gather-nc626/crc-debug-x8dwv"
Sep 29 22:10:10 crc kubenswrapper[4911]: I0929 22:10:10.221931 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7ff6\" (UniqueName: \"kubernetes.io/projected/6d204224-a175-4357-9642-e09a9f8cafbc-kube-api-access-p7ff6\") pod \"crc-debug-x8dwv\" (UID: \"6d204224-a175-4357-9642-e09a9f8cafbc\") " pod="openshift-must-gather-nc626/crc-debug-x8dwv"
Sep 29 22:10:10 crc kubenswrapper[4911]: I0929 22:10:10.222379 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d204224-a175-4357-9642-e09a9f8cafbc-host\") pod \"crc-debug-x8dwv\" (UID: \"6d204224-a175-4357-9642-e09a9f8cafbc\") " pod="openshift-must-gather-nc626/crc-debug-x8dwv"
Sep 29 22:10:10 crc kubenswrapper[4911]: I0929 22:10:10.259634 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7ff6\" (UniqueName: \"kubernetes.io/projected/6d204224-a175-4357-9642-e09a9f8cafbc-kube-api-access-p7ff6\") pod \"crc-debug-x8dwv\" (UID: \"6d204224-a175-4357-9642-e09a9f8cafbc\") " pod="openshift-must-gather-nc626/crc-debug-x8dwv"
Sep 29 22:10:10 crc kubenswrapper[4911]: I0929 22:10:10.364419 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nc626/crc-debug-x8dwv"
Sep 29 22:10:23 crc kubenswrapper[4911]: E0929 22:10:23.643840 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:27ffe36aad6e606e6d0a211f48f3cdb58a53aa0d5e8ead6a444427231261ab9e"
Sep 29 22:10:23 crc kubenswrapper[4911]: E0929 22:10:23.644525 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:27ffe36aad6e606e6d0a211f48f3cdb58a53aa0d5e8ead6a444427231261ab9e,Command:[],Args:[--namespace=$(NAMESPACE) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=perses=$(RELATED_IMAGE_PERSES) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) --openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:4d25b0e31549d780928d2dd3eed7defd9c6d460deb92dcff0fe72c5023029404,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:a0a1d0e39de54c5b2786c2b82d0104f358b479135c069075ddd4f7cd76826c00,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:f3806c97420ec8ba91895ce7627df7612cccb927c05d7854377f45cdd6c924a8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-0-50-rhel9@sha256:4b5e53d226733237fc5abd0476eb3c96162cf3d8da7aeba8deda631fa8987223,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-0-4-rhel9@sha256:53125bddbefca2ba2b57c3fd74bd4b376da803e420201220548878f557bd6610,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-1-0-rhel9@sha256:1dbe9a684271e00c8f36d8b96c9b22f6ee3c6f907ea6ad20980901bd533f9a3a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-0-4-rhel9@sha256:6aafab2c90bcbc6702f2d63d585a764baa8de8207e6af7afa60f3976ddfa9bd3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-0-3-rhel9@sha256:9f80851e8137c2c5e5c2aee13fc663f6c7124d9524d88c06c1507748ce84e1ed,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-6-1-rhel9@sha256:2c9b2be12f15f06a24393dbab6a31682cee399d42e2cc04b0dcf03b2b598d5cf,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-6-0-rhel9@sha256:e9042d93f624790c450724158a8323277e4dd136530c763fec8db31f51fd8552,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-0-4-rhel9@sha256:456d45001816b9adc38745e0ad8705bdc0150d03d0f65e0dfa9caf3fb8980fad,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-0-5-rhel9@sha256:f3446969c67c18b44bee38ac946091fe9397a2117cb5b7aacb39406461c1efe1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-0-4-rhel9@sha256:ade84f8be7d23bd4b9c80e07462dc947280f0bcf6071e6edd927fef54c254b7e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:039e139cf9217bbe72248674df76cbe4baf4bef9f8dc367d2cb51eae9c4aa9d7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:142180f277f0221ef2d4176f9af6dcdb4e7ab434a68f0dfad2ee5bee0e667ddd,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.2.2,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j65p7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-cc5f78dfc-75pqc_openshift-operators(31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 22:10:23 crc kubenswrapper[4911]: E0929 22:10:23.646024 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-cc5f78dfc-75pqc" podUID="31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12"
Sep 29 22:10:24 crc kubenswrapper[4911]: E0929 22:10:24.483655 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:e54c1e1301be66933f3ecb01d5a0ca27f58aabfd905b18b7d057bbf23bdb7b0d"
Sep 29 22:10:24 crc kubenswrapper[4911]: E0929 22:10:24.483899 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:e54c1e1301be66933f3ecb01d5a0ca27f58aabfd905b18b7d057bbf23bdb7b0d,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.2.2,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l_openshift-operators(84376021-7e21-4053-ae59-3665a4c9c507): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 22:10:24 crc kubenswrapper[4911]: E0929 22:10:24.485303 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l" podUID="84376021-7e21-4053-ae59-3665a4c9c507"
Sep 29 22:10:24 crc kubenswrapper[4911]: E0929 22:10:24.488828 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:27ffe36aad6e606e6d0a211f48f3cdb58a53aa0d5e8ead6a444427231261ab9e\\\"\"" pod="openshift-operators/observability-operator-cc5f78dfc-75pqc" podUID="31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12"
Sep 29 22:10:24 crc kubenswrapper[4911]: E0929 22:10:24.492850 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/perses-0-1-rhel9-operator@sha256:bfed9f442aea6e8165644f1dc615beea06ec7fd84ea3f8ca393a63d3627c6a7c"
Sep 29 22:10:24 crc kubenswrapper[4911]: E0929 22:10:24.493189 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-0-1-rhel9-operator@sha256:bfed9f442aea6e8165644f1dc615beea06ec7fd84ea3f8ca393a63d3627c6a7c,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.2.2,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t58qk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-54bc95c9fb-xvj9h_openshift-operators(e8f2e1f7-e311-4126-853a-a85eac6e689c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 22:10:24 crc kubenswrapper[4911]: E0929 22:10:24.494409 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h" podUID="e8f2e1f7-e311-4126-853a-a85eac6e689c"
Sep 29 22:10:25 crc kubenswrapper[4911]: I0929 22:10:25.477073 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm" event={"ID":"ac733ce2-123b-4300-ae15-adffd62f927a","Type":"ContainerStarted","Data":"50aa0f8710e958ed2f580a5ba20ccd3cf427f27c4af001fb7af36a54d1bb5812"}
Sep 29 22:10:25 crc kubenswrapper[4911]: I0929 22:10:25.481402 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vg7l9" event={"ID":"fec367b2-98aa-4597-b64f-b5bdc79b0663","Type":"ContainerStarted","Data":"30da52e711030daff27e3af8d930a0213e204347e46f08f7945f818ee1ab0aa7"}
Sep 29 22:10:25 crc kubenswrapper[4911]: I0929 22:10:25.482952 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/crc-debug-x8dwv" event={"ID":"6d204224-a175-4357-9642-e09a9f8cafbc","Type":"ContainerStarted","Data":"c4eb6fbde1b3fdd45ef52c3f91ac1ce2db494ceb73e22d4a8f3c2eae8950abc7"}
Sep 29 22:10:25 crc kubenswrapper[4911]: E0929 22:10:25.483617 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/perses-0-1-rhel9-operator@sha256:bfed9f442aea6e8165644f1dc615beea06ec7fd84ea3f8ca393a63d3627c6a7c\\\"\"" pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h" podUID="e8f2e1f7-e311-4126-853a-a85eac6e689c"
Sep 29 22:10:25 crc kubenswrapper[4911]: I0929 22:10:25.498885 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm" podStartSLOduration=3.026068322 podStartE2EDuration="24.498869305s" podCreationTimestamp="2025-09-29 22:10:01 +0000 UTC" firstStartedPulling="2025-09-29 22:10:03.112559793 +0000 UTC m=+2681.089672464" lastFinishedPulling="2025-09-29 22:10:24.585360776 +0000 UTC m=+2702.562473447" observedRunningTime="2025-09-29 22:10:25.497335987 +0000 UTC m=+2703.474448668" watchObservedRunningTime="2025-09-29 22:10:25.498869305 +0000 UTC m=+2703.475981976"
Sep 29 22:10:25 crc kubenswrapper[4911]: I0929 22:10:25.626055 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vg7l9" podStartSLOduration=2.874079289 podStartE2EDuration="24.626017682s" podCreationTimestamp="2025-09-29 22:10:01 +0000 UTC" firstStartedPulling="2025-09-29 22:10:02.772300651 +0000 UTC m=+2680.749413322" lastFinishedPulling="2025-09-29 22:10:24.524239044 +0000 UTC m=+2702.501351715" observedRunningTime="2025-09-29 22:10:25.612093457 +0000 UTC m=+2703.589206128" watchObservedRunningTime="2025-09-29 22:10:25.626017682 +0000 UTC m=+2703.603130353"
Sep 29 22:10:26 crc kubenswrapper[4911]: I0929 22:10:26.496645 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l" event={"ID":"84376021-7e21-4053-ae59-3665a4c9c507","Type":"ContainerStarted","Data":"33cb3ec305a0508621f6fd773729a48a9e6a06e558fe48b54dba21f67f43324f"}
Sep 29 22:10:26 crc kubenswrapper[4911]: I0929 22:10:26.514656 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l" podStartSLOduration=-9223372011.340137 podStartE2EDuration="25.514637393s" podCreationTimestamp="2025-09-29 22:10:01 +0000 UTC" firstStartedPulling="2025-09-29 22:10:03.243722824 +0000 UTC m=+2681.220835495" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:10:26.514132847 +0000 UTC m=+2704.491245518" watchObservedRunningTime="2025-09-29 22:10:26.514637393 +0000 UTC m=+2704.491750064"
Sep 29 22:10:36 crc kubenswrapper[4911]: I0929 22:10:36.598466 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/crc-debug-x8dwv" event={"ID":"6d204224-a175-4357-9642-e09a9f8cafbc","Type":"ContainerStarted","Data":"5fb4458e80d6d3d992520ddb9ae20e416cdc8b3f552ec341393f8c2f8bc63d43"}
Sep 29 22:10:36 crc kubenswrapper[4911]: I0929 22:10:36.620011 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-nc626/crc-debug-x8dwv" podStartSLOduration=15.717750971 podStartE2EDuration="26.619991504s" podCreationTimestamp="2025-09-29 22:10:10 +0000 UTC" firstStartedPulling="2025-09-29 22:10:24.5477515 +0000 UTC m=+2702.524864171" lastFinishedPulling="2025-09-29 22:10:35.449992033 +0000 UTC m=+2713.427104704" observedRunningTime="2025-09-29 22:10:36.614317847 +0000 UTC m=+2714.591430518" watchObservedRunningTime="2025-09-29 22:10:36.619991504 +0000 UTC m=+2714.597104185"
Sep 29 22:10:39 crc kubenswrapper[4911]: I0929 22:10:39.635525 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-75pqc" event={"ID":"31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12","Type":"ContainerStarted","Data":"260ead9915f3dc0c22777e95938dea99c96bcdc6baf4e02f5810d977678b7d8a"}
Sep 29 22:10:39 crc kubenswrapper[4911]: I0929 22:10:39.637531 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-75pqc"
Sep 29 22:10:39 crc kubenswrapper[4911]: I0929 22:10:39.638772 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-75pqc"
Sep 29 22:10:39 crc kubenswrapper[4911]: I0929 22:10:39.662750 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-75pqc" podStartSLOduration=2.162002343 podStartE2EDuration="37.662729894s" podCreationTimestamp="2025-09-29 22:10:02 +0000 UTC" firstStartedPulling="2025-09-29 22:10:03.285419199 +0000 UTC m=+2681.262531870" lastFinishedPulling="2025-09-29 22:10:38.78614675 +0000 UTC m=+2716.763259421" observedRunningTime="2025-09-29 22:10:39.655748467 +0000 UTC m=+2717.632861158" watchObservedRunningTime="2025-09-29 22:10:39.662729894 +0000 UTC m=+2717.639842575"
Sep 29 22:10:40 crc kubenswrapper[4911]: I0929 22:10:40.648735 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h" event={"ID":"e8f2e1f7-e311-4126-853a-a85eac6e689c","Type":"ContainerStarted","Data":"5a8082a5aa81d049ffab63dec589bf436f137fd7f3890f7a6cd3426e83543f19"}
Sep 29 22:10:40 crc kubenswrapper[4911]: I0929 22:10:40.650046 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h"
Sep 29 22:10:40 crc kubenswrapper[4911]: I0929 22:10:40.672751 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h" podStartSLOduration=2.456287016 podStartE2EDuration="38.672732942s" podCreationTimestamp="2025-09-29 22:10:02 +0000 UTC" firstStartedPulling="2025-09-29 22:10:03.355373616 +0000 UTC m=+2681.332486287" lastFinishedPulling="2025-09-29 22:10:39.571819542 +0000 UTC m=+2717.548932213" observedRunningTime="2025-09-29 22:10:40.669555242 +0000 UTC m=+2718.646667913" watchObservedRunningTime="2025-09-29 22:10:40.672732942 +0000 UTC m=+2718.649845613"
Sep 29 22:10:52 crc kubenswrapper[4911]: I0929 22:10:52.591496 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-xvj9h"
Sep 29 22:11:25 crc kubenswrapper[4911]: I0929 22:11:25.262758 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7b8dc76f9b-7vhr7_0b2e8086-d33d-420a-8dea-5e892d02b5eb/barbican-api/0.log"
Sep 29 22:11:25 crc kubenswrapper[4911]: I0929 22:11:25.303493 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7b8dc76f9b-7vhr7_0b2e8086-d33d-420a-8dea-5e892d02b5eb/barbican-api-log/0.log"
Sep 29 22:11:25 crc kubenswrapper[4911]: I0929 22:11:25.447312 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5d79d56b48-nd52h_1fee9300-cab0-441a-a47f-a5a1fc02c24d/barbican-keystone-listener/0.log"
Sep 29 22:11:25 crc kubenswrapper[4911]: I0929 22:11:25.501982 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5d79d56b48-nd52h_1fee9300-cab0-441a-a47f-a5a1fc02c24d/barbican-keystone-listener-log/0.log"
Sep 29 22:11:25 crc kubenswrapper[4911]: I0929 22:11:25.678058 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-844ff9b7c7-s8vcn_115e92e2-b175-4f4b-a9fe-cdd0b7d3d104/barbican-worker/0.log"
Sep 29 22:11:25 crc kubenswrapper[4911]: I0929 22:11:25.715252 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-844ff9b7c7-s8vcn_115e92e2-b175-4f4b-a9fe-cdd0b7d3d104/barbican-worker-log/0.log"
Sep 29 22:11:25 crc kubenswrapper[4911]: I0929 22:11:25.935634 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-6srbt_8a132bff-9655-4b4f-9574-ff04307fa051/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:26 crc kubenswrapper[4911]: I0929 22:11:26.087821 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_c346bcf5-d568-4d43-87ee-e243f8332bcb/cinder-api/0.log"
Sep 29 22:11:26 crc kubenswrapper[4911]: I0929 22:11:26.160878 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_c346bcf5-d568-4d43-87ee-e243f8332bcb/cinder-api-log/0.log"
Sep 29 22:11:26 crc kubenswrapper[4911]: I0929 22:11:26.297117 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_896c8bf4-4402-448b-867f-ffd69d511949/cinder-scheduler/0.log"
Sep 29 22:11:26 crc kubenswrapper[4911]: I0929 22:11:26.329469 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_896c8bf4-4402-448b-867f-ffd69d511949/probe/0.log"
Sep 29 22:11:26 crc kubenswrapper[4911]: I0929 22:11:26.498546 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-hsmg5_e6ae5985-a9f0-4e22-b7d0-fe5b6aa574e4/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:26 crc kubenswrapper[4911]: I0929 22:11:26.565203 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-8hxpz_e3e5ed96-035f-4645-a6b6-f92c01981ad4/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:26 crc kubenswrapper[4911]: I0929 22:11:26.706771 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-msfsz_0b286d05-e9e4-4862-bbf0-6f4f658a56e0/init/0.log"
Sep 29 22:11:26 crc kubenswrapper[4911]: I0929 22:11:26.902035 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-msfsz_0b286d05-e9e4-4862-bbf0-6f4f658a56e0/init/0.log"
Sep 29 22:11:26 crc kubenswrapper[4911]: I0929 22:11:26.934553 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-msfsz_0b286d05-e9e4-4862-bbf0-6f4f658a56e0/dnsmasq-dns/0.log"
Sep 29 22:11:27 crc kubenswrapper[4911]: I0929 22:11:27.114682 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-m4ghz_a7ffc2de-d6d9-4e9f-a6c8-a4080e155c99/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:27 crc kubenswrapper[4911]: I0929 22:11:27.182277 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4/glance-httpd/0.log"
Sep 29 22:11:27 crc kubenswrapper[4911]: I0929 22:11:27.337409 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b31746fe-e0eb-4ce1-9d20-3abc7b66ebf4/glance-log/0.log"
Sep 29 22:11:27 crc kubenswrapper[4911]: I0929 22:11:27.388174 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_2ae35250-2041-4b17-8829-f0d982384d7e/glance-httpd/0.log"
Sep 29 22:11:27 crc kubenswrapper[4911]: I0929 22:11:27.533130 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_2ae35250-2041-4b17-8829-f0d982384d7e/glance-log/0.log"
Sep 29 22:11:27 crc kubenswrapper[4911]: I0929 22:11:27.565750 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-slrkc_ca413103-e15f-4ee7-94fa-3f402c7393b2/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:27 crc kubenswrapper[4911]: I0929 22:11:27.785512 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-cp7l4_78314a84-b641-4e3e-9aff-f4c9dd5553fe/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:27 crc kubenswrapper[4911]: I0929 22:11:27.975477 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7fbf6b8688-rgddw_0aeb1728-fd0e-46cd-ba53-8cba740a66ee/keystone-api/0.log"
Sep 29 22:11:28 crc kubenswrapper[4911]: I0929 22:11:28.064546 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319721-rjzjd_bbf8467c-b7de-4104-b9a7-59e1d163bfb7/keystone-cron/0.log"
Sep 29 22:11:28 crc kubenswrapper[4911]: I0929 22:11:28.342972 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-xbkr4_840af368-3414-4fe0-915a-5629b81bbdf4/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:28 crc kubenswrapper[4911]: I0929 22:11:28.591521 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6b4bcd6f7-dzn4f_0920ca6c-fcb6-466b-9a0e-099dde91d938/neutron-api/0.log"
Sep 29 22:11:28 crc kubenswrapper[4911]: I0929 22:11:28.677297 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6b4bcd6f7-dzn4f_0920ca6c-fcb6-466b-9a0e-099dde91d938/neutron-httpd/0.log"
Sep 29 22:11:28 crc kubenswrapper[4911]: I0929 22:11:28.858514 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-vptdq_b40f1414-088a-40e3-a07c-041c6e461771/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:29 crc kubenswrapper[4911]: I0929 22:11:29.218731 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_64c7af34-3461-4dcd-9caf-5ff6f5fb90af/nova-api-log/0.log"
Sep 29 22:11:29 crc kubenswrapper[4911]: I0929 22:11:29.239005 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_64c7af34-3461-4dcd-9caf-5ff6f5fb90af/nova-api-api/0.log"
Sep 29 22:11:29 crc kubenswrapper[4911]: I0929 22:11:29.424070 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_43d54eea-b9ec-4034-9c29-e9426e27f65b/nova-cell0-conductor-conductor/0.log"
Sep 29 22:11:29 crc kubenswrapper[4911]: I0929 22:11:29.555371 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_fb394e72-1a5f-4815-a6d2-e2636239108f/nova-cell1-conductor-conductor/0.log"
Sep 29 22:11:29 crc kubenswrapper[4911]: I0929 22:11:29.717550 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_752cc0da-4c33-4e13-9dbc-e7034e072533/nova-cell1-novncproxy-novncproxy/0.log"
Sep 29 22:11:29 crc kubenswrapper[4911]: I0929 22:11:29.927606 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-dzlt5_ee01ae86-832b-41fb-8a4a-53a73ffdb3aa/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:30 crc kubenswrapper[4911]: I0929 22:11:30.142260 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e/nova-metadata-log/0.log"
Sep 29 22:11:30 crc kubenswrapper[4911]: I0929 22:11:30.512010 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_b196d32a-bb27-4cc0-929f-c49f7a33a52a/nova-scheduler-scheduler/0.log"
Sep 29 22:11:30 crc kubenswrapper[4911]: I0929 22:11:30.658966 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_df57577d-bb06-4339-a3e6-27a2cf733d17/mysql-bootstrap/0.log"
Sep 29 22:11:30 crc kubenswrapper[4911]: I0929 22:11:30.888140 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_df57577d-bb06-4339-a3e6-27a2cf733d17/galera/0.log"
Sep 29 22:11:30 crc kubenswrapper[4911]: I0929 22:11:30.896422 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_df57577d-bb06-4339-a3e6-27a2cf733d17/mysql-bootstrap/0.log"
Sep 29 22:11:30 crc kubenswrapper[4911]: I0929 22:11:30.972459 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_50bcb1f1-af2c-410c-bbca-fc0fd9b05f4e/nova-metadata-metadata/0.log"
Sep 29 22:11:31 crc kubenswrapper[4911]: I0929 22:11:31.163177 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9fb504fe-401c-4b1d-af71-171d017883be/mysql-bootstrap/0.log"
Sep 29 22:11:31 crc kubenswrapper[4911]: I0929 22:11:31.319937 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9fb504fe-401c-4b1d-af71-171d017883be/mysql-bootstrap/0.log"
Sep 29 22:11:31 crc kubenswrapper[4911]: I0929 22:11:31.373973 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9fb504fe-401c-4b1d-af71-171d017883be/galera/0.log"
Sep 29 22:11:31 crc kubenswrapper[4911]: I0929 22:11:31.577727 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_3ca83d01-449e-43fd-a6e3-3a1da30ec45b/openstackclient/0.log"
Sep 29 22:11:31 crc kubenswrapper[4911]: I0929 22:11:31.628102 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-prkv9_b85ad830-8615-4f00-8d68-a2cb2b08dd68/openstack-network-exporter/0.log"
Sep 29 22:11:31 crc kubenswrapper[4911]: I0929 22:11:31.860120 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-nm8s4_755d3290-eae3-4e58-9870-63681ce460d5/ovn-controller/0.log"
Sep 29 22:11:32 crc kubenswrapper[4911]: I0929 22:11:32.075309 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-xhwxg_30d80d24-2072-4fa9-aa03-4448c693ec5f/ovsdb-server-init/0.log"
Sep 29 22:11:32 crc kubenswrapper[4911]: I0929 22:11:32.235714 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-xhwxg_30d80d24-2072-4fa9-aa03-4448c693ec5f/ovsdb-server-init/0.log"
Sep 29 22:11:32 crc kubenswrapper[4911]: I0929 22:11:32.294141 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-xhwxg_30d80d24-2072-4fa9-aa03-4448c693ec5f/ovsdb-server/0.log"
Sep 29 22:11:32 crc kubenswrapper[4911]: I0929 22:11:32.325708 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-xhwxg_30d80d24-2072-4fa9-aa03-4448c693ec5f/ovs-vswitchd/0.log"
Sep 29 22:11:32 crc kubenswrapper[4911]: I0929 22:11:32.537645 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-d6sx5_9cecf675-656c-4eab-97c6-7fbd57ee26e8/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:32 crc kubenswrapper[4911]: I0929 22:11:32.685967 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2/openstack-network-exporter/0.log"
Sep 29 22:11:32 crc kubenswrapper[4911]: I0929 22:11:32.757581 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_7e6d47ac-0fbd-4e48-8ec0-8b8c7fd086d2/ovn-northd/0.log"
Sep 29 22:11:32 crc kubenswrapper[4911]: I0929 22:11:32.906641 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_7ccc0850-1d12-486d-bb28-2ebd69c456e0/openstack-network-exporter/0.log"
Sep 29 22:11:32 crc kubenswrapper[4911]: I0929 22:11:32.965332 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_7ccc0850-1d12-486d-bb28-2ebd69c456e0/ovsdbserver-nb/0.log"
Sep 29 22:11:33 crc kubenswrapper[4911]: I0929 22:11:33.101213 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_3ad46165-7cec-489f-a199-71ed3a5f1c44/openstack-network-exporter/0.log"
Sep 29 22:11:33 crc kubenswrapper[4911]: I0929 22:11:33.179784 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_3ad46165-7cec-489f-a199-71ed3a5f1c44/ovsdbserver-sb/0.log"
Sep 29 22:11:33 crc kubenswrapper[4911]: I0929 22:11:33.324184 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-65555f7b56-th6vc_00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1/placement-api/0.log"
Sep 29 22:11:33 crc kubenswrapper[4911]: I0929 22:11:33.454525 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-65555f7b56-th6vc_00d05bc5-6a5d-4a65-8afa-8d9ea429e2c1/placement-log/0.log"
Sep 29 22:11:33 crc kubenswrapper[4911]: I0929 22:11:33.510775 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ba53369f-42c7-4fb2-82e2-cf4eaebcedd7/setup-container/0.log"
Sep 29 22:11:33 crc kubenswrapper[4911]: I0929 22:11:33.798715 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ba53369f-42c7-4fb2-82e2-cf4eaebcedd7/setup-container/0.log"
Sep 29 22:11:33 crc kubenswrapper[4911]: I0929 22:11:33.851292 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ba53369f-42c7-4fb2-82e2-cf4eaebcedd7/rabbitmq/0.log"
Sep 29 22:11:33 crc kubenswrapper[4911]: I0929 22:11:33.975739 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a4102e69-ec0f-43d2-aaf9-0b760d487420/setup-container/0.log"
Sep 29 22:11:34 crc kubenswrapper[4911]: I0929 22:11:34.202057 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a4102e69-ec0f-43d2-aaf9-0b760d487420/setup-container/0.log"
Sep 29 22:11:34 crc kubenswrapper[4911]: I0929 22:11:34.252182 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a4102e69-ec0f-43d2-aaf9-0b760d487420/rabbitmq/0.log"
Sep 29 22:11:34 crc kubenswrapper[4911]: I0929 22:11:34.423798 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-ljflp_87480cd8-68c7-4315-ac1d-7c10d5fb6b79/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:34 crc kubenswrapper[4911]: I0929 22:11:34.486090 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-hp9bp_170521df-782e-45b7-9ae9-389fff67083e/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:34 crc kubenswrapper[4911]: I0929 22:11:34.725746 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-zp5fn_44ed0dd9-96df-4d55-b788-5e82df516063/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:34 crc kubenswrapper[4911]: I0929 22:11:34.976361 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-ddxcp_97ff1dd2-9857-4a1f-879f-741477ecc4a8/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:35 crc kubenswrapper[4911]: I0929 22:11:35.081106 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-ntqrx_871d427c-7323-4980-aa4f-d9c835dd2d91/ssh-known-hosts-edpm-deployment/0.log"
Sep 29 22:11:35 crc kubenswrapper[4911]: I0929 22:11:35.328935 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-c9f6bd49f-qt4wx_5a0d597e-d509-41b7-839f-3b4b76863ab8/proxy-server/0.log"
Sep 29 22:11:35 crc kubenswrapper[4911]: I0929 22:11:35.336496 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-c9f6bd49f-qt4wx_5a0d597e-d509-41b7-839f-3b4b76863ab8/proxy-httpd/0.log"
Sep 29 22:11:35 crc kubenswrapper[4911]: I0929 22:11:35.527414 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-878rf_66dda823-f2ce-4a7a-9632-ea5a4022de8d/swift-ring-rebalance/0.log"
Sep 29 22:11:35 crc kubenswrapper[4911]: I0929 22:11:35.722354 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/account-auditor/0.log"
Sep 29 22:11:35 crc kubenswrapper[4911]: I0929 22:11:35.785469 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/account-reaper/0.log"
Sep 29 22:11:35 crc kubenswrapper[4911]: I0929 22:11:35.870445 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/account-replicator/0.log"
Sep 29 22:11:35 crc kubenswrapper[4911]: I0929 22:11:35.925104 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/account-server/0.log"
Sep 29 22:11:35 crc kubenswrapper[4911]: I0929 22:11:35.988621 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/container-auditor/0.log"
Sep 29 22:11:36 crc kubenswrapper[4911]: I0929 22:11:36.080710 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/container-replicator/0.log"
Sep 29 22:11:36 crc kubenswrapper[4911]: I0929 22:11:36.148368 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/container-server/0.log"
Sep 29 22:11:36 crc kubenswrapper[4911]: I0929 22:11:36.177991 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/container-updater/0.log"
Sep 29 22:11:36 crc kubenswrapper[4911]: I0929 22:11:36.273968 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/object-auditor/0.log"
Sep 29 22:11:36 crc kubenswrapper[4911]: I0929 22:11:36.357593 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/object-replicator/0.log"
Sep 29 22:11:36 crc kubenswrapper[4911]: I0929 22:11:36.401842 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/object-expirer/0.log"
Sep 29 22:11:36 crc kubenswrapper[4911]: I0929 22:11:36.452515 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/object-server/0.log"
Sep 29 22:11:36 crc kubenswrapper[4911]: I0929 22:11:36.553011 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/object-updater/0.log"
Sep 29 22:11:36 crc kubenswrapper[4911]: I0929 22:11:36.590193 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/rsync/0.log"
Sep 29 22:11:36 crc kubenswrapper[4911]: I0929 22:11:36.637495 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_00a836d8-70df-4583-948f-e6869d77e432/swift-recon-cron/0.log"
Sep 29 22:11:36 crc kubenswrapper[4911]: I0929 22:11:36.837156 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-vsqh9_16ca72ec-4fbd-4367-b5c8-ea180f6fc189/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:36 crc kubenswrapper[4911]: I0929 22:11:36.984438 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-ncdbx_697342c7-feea-4250-90f3-adca6bcada86/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 22:11:38 crc kubenswrapper[4911]: I0929 22:11:38.362825 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_5849b99d-1ca7-4258-b88a-704a89d46c4e/memcached/0.log"
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.138252 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wtbsl"]
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.141296 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.146270 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wtbsl"]
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.244887 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-utilities\") pod \"certified-operators-wtbsl\" (UID: \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\") " pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.245229 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-catalog-content\") pod \"certified-operators-wtbsl\" (UID: \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\") " pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.245269 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htlqj\" (UniqueName: \"kubernetes.io/projected/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-kube-api-access-htlqj\") pod \"certified-operators-wtbsl\" (UID: \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\") " pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.347618 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-catalog-content\") pod \"certified-operators-wtbsl\" (UID: \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\") " pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.347699 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htlqj\" (UniqueName: \"kubernetes.io/projected/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-kube-api-access-htlqj\") pod \"certified-operators-wtbsl\" (UID: \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\") " pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.347871 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-utilities\") pod \"certified-operators-wtbsl\" (UID: \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\") " pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.348141 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-catalog-content\") pod \"certified-operators-wtbsl\" (UID: \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\") " pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.348345 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-utilities\") pod \"certified-operators-wtbsl\" (UID: \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\") " pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.368680 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htlqj\" (UniqueName: \"kubernetes.io/projected/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-kube-api-access-htlqj\") pod \"certified-operators-wtbsl\" (UID: \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\") " pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.463388 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:47 crc kubenswrapper[4911]: I0929 22:11:47.957125 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wtbsl"]
Sep 29 22:11:48 crc kubenswrapper[4911]: I0929 22:11:48.273812 4911 generic.go:334] "Generic (PLEG): container finished" podID="7fd751dd-46d1-4670-b2a6-2adc2a261fc8" containerID="141c997fabb3e6c60ea452ed697d5891e20368753729efe2e0e4f19f9f620bc7" exitCode=0
Sep 29 22:11:48 crc kubenswrapper[4911]: I0929 22:11:48.274187 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wtbsl" event={"ID":"7fd751dd-46d1-4670-b2a6-2adc2a261fc8","Type":"ContainerDied","Data":"141c997fabb3e6c60ea452ed697d5891e20368753729efe2e0e4f19f9f620bc7"}
Sep 29 22:11:48 crc kubenswrapper[4911]: I0929 22:11:48.274212 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wtbsl" event={"ID":"7fd751dd-46d1-4670-b2a6-2adc2a261fc8","Type":"ContainerStarted","Data":"428387cf0333807b9b7d871e321348c362307ad2bdfde3b618961bad4e8f94f1"}
Sep 29 22:11:49 crc kubenswrapper[4911]: I0929 22:11:49.284331 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wtbsl" event={"ID":"7fd751dd-46d1-4670-b2a6-2adc2a261fc8","Type":"ContainerStarted","Data":"b8b4f6944cbfde2ab1edcd462402e5599ff44741c1551b307d9447692f2ec964"}
Sep 29 22:11:50 crc kubenswrapper[4911]: I0929 22:11:50.295454 4911 generic.go:334] "Generic (PLEG): container finished" podID="7fd751dd-46d1-4670-b2a6-2adc2a261fc8" containerID="b8b4f6944cbfde2ab1edcd462402e5599ff44741c1551b307d9447692f2ec964" exitCode=0
Sep 29 22:11:50 crc kubenswrapper[4911]: I0929 22:11:50.295497 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wtbsl" event={"ID":"7fd751dd-46d1-4670-b2a6-2adc2a261fc8","Type":"ContainerDied","Data":"b8b4f6944cbfde2ab1edcd462402e5599ff44741c1551b307d9447692f2ec964"}
Sep 29 22:11:51 crc kubenswrapper[4911]: I0929 22:11:51.307329 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wtbsl" event={"ID":"7fd751dd-46d1-4670-b2a6-2adc2a261fc8","Type":"ContainerStarted","Data":"1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c"}
Sep 29 22:11:51 crc kubenswrapper[4911]: I0929 22:11:51.331719 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wtbsl" podStartSLOduration=1.94796955 podStartE2EDuration="4.331698531s" podCreationTimestamp="2025-09-29 22:11:47 +0000 UTC" firstStartedPulling="2025-09-29 22:11:48.276212112 +0000 UTC m=+2786.253324783" lastFinishedPulling="2025-09-29 22:11:50.659941093 +0000 UTC m=+2788.637053764" observedRunningTime="2025-09-29 22:11:51.322469453 +0000 UTC m=+2789.299582124" watchObservedRunningTime="2025-09-29 22:11:51.331698531 +0000 UTC m=+2789.308811202"
Sep 29 22:11:55 crc kubenswrapper[4911]: I0929 22:11:55.211239 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 22:11:55 crc kubenswrapper[4911]: I0929 22:11:55.212104 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 22:11:57 crc kubenswrapper[4911]: I0929 22:11:57.464457 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:57 crc kubenswrapper[4911]: I0929 22:11:57.464979 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:57 crc kubenswrapper[4911]: I0929 22:11:57.522546 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:58 crc kubenswrapper[4911]: I0929 22:11:58.451138 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:11:58 crc kubenswrapper[4911]: I0929 22:11:58.534875 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wtbsl"]
Sep 29 22:12:00 crc kubenswrapper[4911]: I0929 22:12:00.418987 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wtbsl" podUID="7fd751dd-46d1-4670-b2a6-2adc2a261fc8" containerName="registry-server" containerID="cri-o://1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c" gracePeriod=2
Sep 29 22:12:00 crc kubenswrapper[4911]: I0929 22:12:00.917481 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wtbsl"
Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.043649 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htlqj\" (UniqueName: \"kubernetes.io/projected/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-kube-api-access-htlqj\") pod \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\" (UID: \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\") "
Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.043972 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-utilities\") pod \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\" (UID: \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\") "
Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.044022 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-catalog-content\") pod \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\" (UID: \"7fd751dd-46d1-4670-b2a6-2adc2a261fc8\") "
Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.054740 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-utilities" (OuterVolumeSpecName: "utilities") pod "7fd751dd-46d1-4670-b2a6-2adc2a261fc8" (UID: "7fd751dd-46d1-4670-b2a6-2adc2a261fc8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.059744 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-kube-api-access-htlqj" (OuterVolumeSpecName: "kube-api-access-htlqj") pod "7fd751dd-46d1-4670-b2a6-2adc2a261fc8" (UID: "7fd751dd-46d1-4670-b2a6-2adc2a261fc8"). InnerVolumeSpecName "kube-api-access-htlqj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.089081 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7fd751dd-46d1-4670-b2a6-2adc2a261fc8" (UID: "7fd751dd-46d1-4670-b2a6-2adc2a261fc8"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.146205 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htlqj\" (UniqueName: \"kubernetes.io/projected/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-kube-api-access-htlqj\") on node \"crc\" DevicePath \"\"" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.146235 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.146244 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fd751dd-46d1-4670-b2a6-2adc2a261fc8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.429635 4911 generic.go:334] "Generic (PLEG): container finished" podID="7fd751dd-46d1-4670-b2a6-2adc2a261fc8" containerID="1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c" exitCode=0 Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.429682 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wtbsl" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.429690 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wtbsl" event={"ID":"7fd751dd-46d1-4670-b2a6-2adc2a261fc8","Type":"ContainerDied","Data":"1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c"} Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.429860 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wtbsl" event={"ID":"7fd751dd-46d1-4670-b2a6-2adc2a261fc8","Type":"ContainerDied","Data":"428387cf0333807b9b7d871e321348c362307ad2bdfde3b618961bad4e8f94f1"} Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.429890 4911 scope.go:117] "RemoveContainer" containerID="1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.452993 4911 scope.go:117] "RemoveContainer" containerID="b8b4f6944cbfde2ab1edcd462402e5599ff44741c1551b307d9447692f2ec964" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.484769 4911 scope.go:117] "RemoveContainer" containerID="141c997fabb3e6c60ea452ed697d5891e20368753729efe2e0e4f19f9f620bc7" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.497154 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wtbsl"] Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.528102 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wtbsl"] Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.554869 4911 scope.go:117] "RemoveContainer" containerID="1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c" Sep 29 22:12:01 crc kubenswrapper[4911]: E0929 22:12:01.555439 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c\": container with ID starting with 1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c not found: ID does not exist" containerID="1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.555483 
4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c"} err="failed to get container status \"1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c\": rpc error: code = NotFound desc = could not find container \"1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c\": container with ID starting with 1e6d60ecb526e61f8c41199d733f6a7e262a87dc24a434fee285095c03e4800c not found: ID does not exist" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.555509 4911 scope.go:117] "RemoveContainer" containerID="b8b4f6944cbfde2ab1edcd462402e5599ff44741c1551b307d9447692f2ec964" Sep 29 22:12:01 crc kubenswrapper[4911]: E0929 22:12:01.555882 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8b4f6944cbfde2ab1edcd462402e5599ff44741c1551b307d9447692f2ec964\": container with ID starting with b8b4f6944cbfde2ab1edcd462402e5599ff44741c1551b307d9447692f2ec964 not found: ID does not exist" containerID="b8b4f6944cbfde2ab1edcd462402e5599ff44741c1551b307d9447692f2ec964" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.556173 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8b4f6944cbfde2ab1edcd462402e5599ff44741c1551b307d9447692f2ec964"} err="failed to get container status \"b8b4f6944cbfde2ab1edcd462402e5599ff44741c1551b307d9447692f2ec964\": rpc error: code = NotFound desc = could not find container \"b8b4f6944cbfde2ab1edcd462402e5599ff44741c1551b307d9447692f2ec964\": container with ID starting with b8b4f6944cbfde2ab1edcd462402e5599ff44741c1551b307d9447692f2ec964 not found: ID does not exist" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.556296 4911 scope.go:117] "RemoveContainer" containerID="141c997fabb3e6c60ea452ed697d5891e20368753729efe2e0e4f19f9f620bc7" Sep 29 22:12:01 crc kubenswrapper[4911]: E0929 22:12:01.556965 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"141c997fabb3e6c60ea452ed697d5891e20368753729efe2e0e4f19f9f620bc7\": container with ID starting with 141c997fabb3e6c60ea452ed697d5891e20368753729efe2e0e4f19f9f620bc7 not found: ID does not exist" containerID="141c997fabb3e6c60ea452ed697d5891e20368753729efe2e0e4f19f9f620bc7" Sep 29 22:12:01 crc kubenswrapper[4911]: I0929 22:12:01.556987 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"141c997fabb3e6c60ea452ed697d5891e20368753729efe2e0e4f19f9f620bc7"} err="failed to get container status \"141c997fabb3e6c60ea452ed697d5891e20368753729efe2e0e4f19f9f620bc7\": rpc error: code = NotFound desc = could not find container \"141c997fabb3e6c60ea452ed697d5891e20368753729efe2e0e4f19f9f620bc7\": container with ID starting with 141c997fabb3e6c60ea452ed697d5891e20368753729efe2e0e4f19f9f620bc7 not found: ID does not exist" Sep 29 22:12:02 crc kubenswrapper[4911]: I0929 22:12:02.739666 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fd751dd-46d1-4670-b2a6-2adc2a261fc8" path="/var/lib/kubelet/pods/7fd751dd-46d1-4670-b2a6-2adc2a261fc8/volumes" Sep 29 22:12:10 crc kubenswrapper[4911]: I0929 22:12:10.555769 4911 generic.go:334] "Generic (PLEG): container finished" podID="6d204224-a175-4357-9642-e09a9f8cafbc" containerID="5fb4458e80d6d3d992520ddb9ae20e416cdc8b3f552ec341393f8c2f8bc63d43" exitCode=0 Sep 29 22:12:10 crc kubenswrapper[4911]: 
I0929 22:12:10.555877 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/crc-debug-x8dwv" event={"ID":"6d204224-a175-4357-9642-e09a9f8cafbc","Type":"ContainerDied","Data":"5fb4458e80d6d3d992520ddb9ae20e416cdc8b3f552ec341393f8c2f8bc63d43"} Sep 29 22:12:11 crc kubenswrapper[4911]: I0929 22:12:11.698784 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nc626/crc-debug-x8dwv" Sep 29 22:12:11 crc kubenswrapper[4911]: I0929 22:12:11.743214 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nc626/crc-debug-x8dwv"] Sep 29 22:12:11 crc kubenswrapper[4911]: I0929 22:12:11.753431 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nc626/crc-debug-x8dwv"] Sep 29 22:12:11 crc kubenswrapper[4911]: I0929 22:12:11.786689 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7ff6\" (UniqueName: \"kubernetes.io/projected/6d204224-a175-4357-9642-e09a9f8cafbc-kube-api-access-p7ff6\") pod \"6d204224-a175-4357-9642-e09a9f8cafbc\" (UID: \"6d204224-a175-4357-9642-e09a9f8cafbc\") " Sep 29 22:12:11 crc kubenswrapper[4911]: I0929 22:12:11.786767 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d204224-a175-4357-9642-e09a9f8cafbc-host\") pod \"6d204224-a175-4357-9642-e09a9f8cafbc\" (UID: \"6d204224-a175-4357-9642-e09a9f8cafbc\") " Sep 29 22:12:11 crc kubenswrapper[4911]: I0929 22:12:11.788110 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6d204224-a175-4357-9642-e09a9f8cafbc-host" (OuterVolumeSpecName: "host") pod "6d204224-a175-4357-9642-e09a9f8cafbc" (UID: "6d204224-a175-4357-9642-e09a9f8cafbc"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:12:11 crc kubenswrapper[4911]: I0929 22:12:11.795099 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d204224-a175-4357-9642-e09a9f8cafbc-kube-api-access-p7ff6" (OuterVolumeSpecName: "kube-api-access-p7ff6") pod "6d204224-a175-4357-9642-e09a9f8cafbc" (UID: "6d204224-a175-4357-9642-e09a9f8cafbc"). InnerVolumeSpecName "kube-api-access-p7ff6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:12:11 crc kubenswrapper[4911]: I0929 22:12:11.889842 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7ff6\" (UniqueName: \"kubernetes.io/projected/6d204224-a175-4357-9642-e09a9f8cafbc-kube-api-access-p7ff6\") on node \"crc\" DevicePath \"\"" Sep 29 22:12:11 crc kubenswrapper[4911]: I0929 22:12:11.889898 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d204224-a175-4357-9642-e09a9f8cafbc-host\") on node \"crc\" DevicePath \"\"" Sep 29 22:12:12 crc kubenswrapper[4911]: I0929 22:12:12.584080 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4eb6fbde1b3fdd45ef52c3f91ac1ce2db494ceb73e22d4a8f3c2eae8950abc7" Sep 29 22:12:12 crc kubenswrapper[4911]: I0929 22:12:12.584191 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nc626/crc-debug-x8dwv" Sep 29 22:12:12 crc kubenswrapper[4911]: I0929 22:12:12.717972 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d204224-a175-4357-9642-e09a9f8cafbc" path="/var/lib/kubelet/pods/6d204224-a175-4357-9642-e09a9f8cafbc/volumes" Sep 29 22:12:12 crc kubenswrapper[4911]: I0929 22:12:12.894663 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nc626/crc-debug-hzrm2"] Sep 29 22:12:12 crc kubenswrapper[4911]: E0929 22:12:12.895536 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fd751dd-46d1-4670-b2a6-2adc2a261fc8" containerName="extract-utilities" Sep 29 22:12:12 crc kubenswrapper[4911]: I0929 22:12:12.895558 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fd751dd-46d1-4670-b2a6-2adc2a261fc8" containerName="extract-utilities" Sep 29 22:12:12 crc kubenswrapper[4911]: E0929 22:12:12.895578 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d204224-a175-4357-9642-e09a9f8cafbc" containerName="container-00" Sep 29 22:12:12 crc kubenswrapper[4911]: I0929 22:12:12.895586 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d204224-a175-4357-9642-e09a9f8cafbc" containerName="container-00" Sep 29 22:12:12 crc kubenswrapper[4911]: E0929 22:12:12.895614 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fd751dd-46d1-4670-b2a6-2adc2a261fc8" containerName="extract-content" Sep 29 22:12:12 crc kubenswrapper[4911]: I0929 22:12:12.895622 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fd751dd-46d1-4670-b2a6-2adc2a261fc8" containerName="extract-content" Sep 29 22:12:12 crc kubenswrapper[4911]: E0929 22:12:12.895637 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fd751dd-46d1-4670-b2a6-2adc2a261fc8" containerName="registry-server" Sep 29 22:12:12 crc kubenswrapper[4911]: I0929 22:12:12.895644 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fd751dd-46d1-4670-b2a6-2adc2a261fc8" containerName="registry-server" Sep 29 22:12:12 crc kubenswrapper[4911]: I0929 22:12:12.895889 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fd751dd-46d1-4670-b2a6-2adc2a261fc8" containerName="registry-server" Sep 29 22:12:12 crc kubenswrapper[4911]: I0929 22:12:12.895909 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d204224-a175-4357-9642-e09a9f8cafbc" containerName="container-00" Sep 29 22:12:12 crc kubenswrapper[4911]: I0929 22:12:12.896710 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nc626/crc-debug-hzrm2" Sep 29 22:12:13 crc kubenswrapper[4911]: I0929 22:12:13.010078 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c8wx\" (UniqueName: \"kubernetes.io/projected/7a54cb03-6eef-4262-a106-b029bb1d7aa4-kube-api-access-9c8wx\") pod \"crc-debug-hzrm2\" (UID: \"7a54cb03-6eef-4262-a106-b029bb1d7aa4\") " pod="openshift-must-gather-nc626/crc-debug-hzrm2" Sep 29 22:12:13 crc kubenswrapper[4911]: I0929 22:12:13.010145 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a54cb03-6eef-4262-a106-b029bb1d7aa4-host\") pod \"crc-debug-hzrm2\" (UID: \"7a54cb03-6eef-4262-a106-b029bb1d7aa4\") " pod="openshift-must-gather-nc626/crc-debug-hzrm2" Sep 29 22:12:13 crc kubenswrapper[4911]: I0929 22:12:13.111483 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a54cb03-6eef-4262-a106-b029bb1d7aa4-host\") pod \"crc-debug-hzrm2\" (UID: \"7a54cb03-6eef-4262-a106-b029bb1d7aa4\") " pod="openshift-must-gather-nc626/crc-debug-hzrm2" Sep 29 22:12:13 crc kubenswrapper[4911]: I0929 22:12:13.111644 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a54cb03-6eef-4262-a106-b029bb1d7aa4-host\") pod \"crc-debug-hzrm2\" (UID: \"7a54cb03-6eef-4262-a106-b029bb1d7aa4\") " pod="openshift-must-gather-nc626/crc-debug-hzrm2" Sep 29 22:12:13 crc kubenswrapper[4911]: I0929 22:12:13.111665 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c8wx\" (UniqueName: \"kubernetes.io/projected/7a54cb03-6eef-4262-a106-b029bb1d7aa4-kube-api-access-9c8wx\") pod \"crc-debug-hzrm2\" (UID: \"7a54cb03-6eef-4262-a106-b029bb1d7aa4\") " pod="openshift-must-gather-nc626/crc-debug-hzrm2" Sep 29 22:12:13 crc kubenswrapper[4911]: I0929 22:12:13.132034 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c8wx\" (UniqueName: \"kubernetes.io/projected/7a54cb03-6eef-4262-a106-b029bb1d7aa4-kube-api-access-9c8wx\") pod \"crc-debug-hzrm2\" (UID: \"7a54cb03-6eef-4262-a106-b029bb1d7aa4\") " pod="openshift-must-gather-nc626/crc-debug-hzrm2" Sep 29 22:12:13 crc kubenswrapper[4911]: I0929 22:12:13.216035 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nc626/crc-debug-hzrm2" Sep 29 22:12:13 crc kubenswrapper[4911]: I0929 22:12:13.598118 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/crc-debug-hzrm2" event={"ID":"7a54cb03-6eef-4262-a106-b029bb1d7aa4","Type":"ContainerStarted","Data":"9c4ab25c15c61993dc56dc1ea7d4821e780cec2a7f35447da4acf46edc955e5e"} Sep 29 22:12:13 crc kubenswrapper[4911]: I0929 22:12:13.598459 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/crc-debug-hzrm2" event={"ID":"7a54cb03-6eef-4262-a106-b029bb1d7aa4","Type":"ContainerStarted","Data":"e076a998490c2497a8cc41872355b6f7716fdb3cda7381312000f31886666dd6"} Sep 29 22:12:13 crc kubenswrapper[4911]: I0929 22:12:13.625758 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-nc626/crc-debug-hzrm2" podStartSLOduration=1.625731609 podStartE2EDuration="1.625731609s" podCreationTimestamp="2025-09-29 22:12:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:12:13.61233638 +0000 UTC m=+2811.589449051" watchObservedRunningTime="2025-09-29 22:12:13.625731609 +0000 UTC m=+2811.602844320" Sep 29 22:12:14 crc kubenswrapper[4911]: I0929 22:12:14.611095 4911 generic.go:334] "Generic (PLEG): container finished" podID="7a54cb03-6eef-4262-a106-b029bb1d7aa4" containerID="9c4ab25c15c61993dc56dc1ea7d4821e780cec2a7f35447da4acf46edc955e5e" exitCode=0 Sep 29 22:12:14 crc kubenswrapper[4911]: I0929 22:12:14.611136 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/crc-debug-hzrm2" event={"ID":"7a54cb03-6eef-4262-a106-b029bb1d7aa4","Type":"ContainerDied","Data":"9c4ab25c15c61993dc56dc1ea7d4821e780cec2a7f35447da4acf46edc955e5e"} Sep 29 22:12:15 crc kubenswrapper[4911]: I0929 22:12:15.739392 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nc626/crc-debug-hzrm2" Sep 29 22:12:15 crc kubenswrapper[4911]: I0929 22:12:15.861908 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9c8wx\" (UniqueName: \"kubernetes.io/projected/7a54cb03-6eef-4262-a106-b029bb1d7aa4-kube-api-access-9c8wx\") pod \"7a54cb03-6eef-4262-a106-b029bb1d7aa4\" (UID: \"7a54cb03-6eef-4262-a106-b029bb1d7aa4\") " Sep 29 22:12:15 crc kubenswrapper[4911]: I0929 22:12:15.861963 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a54cb03-6eef-4262-a106-b029bb1d7aa4-host\") pod \"7a54cb03-6eef-4262-a106-b029bb1d7aa4\" (UID: \"7a54cb03-6eef-4262-a106-b029bb1d7aa4\") " Sep 29 22:12:15 crc kubenswrapper[4911]: I0929 22:12:15.862024 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7a54cb03-6eef-4262-a106-b029bb1d7aa4-host" (OuterVolumeSpecName: "host") pod "7a54cb03-6eef-4262-a106-b029bb1d7aa4" (UID: "7a54cb03-6eef-4262-a106-b029bb1d7aa4"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:12:15 crc kubenswrapper[4911]: I0929 22:12:15.862582 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a54cb03-6eef-4262-a106-b029bb1d7aa4-host\") on node \"crc\" DevicePath \"\"" Sep 29 22:12:15 crc kubenswrapper[4911]: I0929 22:12:15.878976 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a54cb03-6eef-4262-a106-b029bb1d7aa4-kube-api-access-9c8wx" (OuterVolumeSpecName: "kube-api-access-9c8wx") pod "7a54cb03-6eef-4262-a106-b029bb1d7aa4" (UID: "7a54cb03-6eef-4262-a106-b029bb1d7aa4"). InnerVolumeSpecName "kube-api-access-9c8wx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:12:15 crc kubenswrapper[4911]: I0929 22:12:15.963737 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9c8wx\" (UniqueName: \"kubernetes.io/projected/7a54cb03-6eef-4262-a106-b029bb1d7aa4-kube-api-access-9c8wx\") on node \"crc\" DevicePath \"\"" Sep 29 22:12:16 crc kubenswrapper[4911]: I0929 22:12:16.628120 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/crc-debug-hzrm2" event={"ID":"7a54cb03-6eef-4262-a106-b029bb1d7aa4","Type":"ContainerDied","Data":"e076a998490c2497a8cc41872355b6f7716fdb3cda7381312000f31886666dd6"} Sep 29 22:12:16 crc kubenswrapper[4911]: I0929 22:12:16.628462 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e076a998490c2497a8cc41872355b6f7716fdb3cda7381312000f31886666dd6" Sep 29 22:12:16 crc kubenswrapper[4911]: I0929 22:12:16.628311 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nc626/crc-debug-hzrm2" Sep 29 22:12:19 crc kubenswrapper[4911]: I0929 22:12:19.883149 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nc626/crc-debug-hzrm2"] Sep 29 22:12:19 crc kubenswrapper[4911]: I0929 22:12:19.894255 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nc626/crc-debug-hzrm2"] Sep 29 22:12:20 crc kubenswrapper[4911]: I0929 22:12:20.719356 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a54cb03-6eef-4262-a106-b029bb1d7aa4" path="/var/lib/kubelet/pods/7a54cb03-6eef-4262-a106-b029bb1d7aa4/volumes" Sep 29 22:12:21 crc kubenswrapper[4911]: I0929 22:12:21.107088 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nc626/crc-debug-qm7tp"] Sep 29 22:12:21 crc kubenswrapper[4911]: E0929 22:12:21.112332 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a54cb03-6eef-4262-a106-b029bb1d7aa4" containerName="container-00" Sep 29 22:12:21 crc kubenswrapper[4911]: I0929 22:12:21.112385 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a54cb03-6eef-4262-a106-b029bb1d7aa4" containerName="container-00" Sep 29 22:12:21 crc kubenswrapper[4911]: I0929 22:12:21.112890 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a54cb03-6eef-4262-a106-b029bb1d7aa4" containerName="container-00" Sep 29 22:12:21 crc kubenswrapper[4911]: I0929 22:12:21.116888 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nc626/crc-debug-qm7tp" Sep 29 22:12:21 crc kubenswrapper[4911]: I0929 22:12:21.169261 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b4d9e6f3-178e-4c47-852f-07f06b90fcd8-host\") pod \"crc-debug-qm7tp\" (UID: \"b4d9e6f3-178e-4c47-852f-07f06b90fcd8\") " pod="openshift-must-gather-nc626/crc-debug-qm7tp" Sep 29 22:12:21 crc kubenswrapper[4911]: I0929 22:12:21.169588 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq8dk\" (UniqueName: \"kubernetes.io/projected/b4d9e6f3-178e-4c47-852f-07f06b90fcd8-kube-api-access-jq8dk\") pod \"crc-debug-qm7tp\" (UID: \"b4d9e6f3-178e-4c47-852f-07f06b90fcd8\") " pod="openshift-must-gather-nc626/crc-debug-qm7tp" Sep 29 22:12:21 crc kubenswrapper[4911]: I0929 22:12:21.271785 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq8dk\" (UniqueName: \"kubernetes.io/projected/b4d9e6f3-178e-4c47-852f-07f06b90fcd8-kube-api-access-jq8dk\") pod \"crc-debug-qm7tp\" (UID: \"b4d9e6f3-178e-4c47-852f-07f06b90fcd8\") " pod="openshift-must-gather-nc626/crc-debug-qm7tp" Sep 29 22:12:21 crc kubenswrapper[4911]: I0929 22:12:21.272044 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b4d9e6f3-178e-4c47-852f-07f06b90fcd8-host\") pod \"crc-debug-qm7tp\" (UID: \"b4d9e6f3-178e-4c47-852f-07f06b90fcd8\") " pod="openshift-must-gather-nc626/crc-debug-qm7tp" Sep 29 22:12:21 crc kubenswrapper[4911]: I0929 22:12:21.272136 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b4d9e6f3-178e-4c47-852f-07f06b90fcd8-host\") pod \"crc-debug-qm7tp\" (UID: \"b4d9e6f3-178e-4c47-852f-07f06b90fcd8\") " pod="openshift-must-gather-nc626/crc-debug-qm7tp" Sep 29 22:12:21 crc kubenswrapper[4911]: I0929 22:12:21.294302 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jq8dk\" (UniqueName: \"kubernetes.io/projected/b4d9e6f3-178e-4c47-852f-07f06b90fcd8-kube-api-access-jq8dk\") pod \"crc-debug-qm7tp\" (UID: \"b4d9e6f3-178e-4c47-852f-07f06b90fcd8\") " pod="openshift-must-gather-nc626/crc-debug-qm7tp" Sep 29 22:12:21 crc kubenswrapper[4911]: I0929 22:12:21.447490 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nc626/crc-debug-qm7tp" Sep 29 22:12:21 crc kubenswrapper[4911]: I0929 22:12:21.675406 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/crc-debug-qm7tp" event={"ID":"b4d9e6f3-178e-4c47-852f-07f06b90fcd8","Type":"ContainerStarted","Data":"fbbdc0fe7378f1222cc1d52f84111f4f9f905365954c77f3a99ba9a33c5bbe4b"} Sep 29 22:12:22 crc kubenswrapper[4911]: I0929 22:12:22.692048 4911 generic.go:334] "Generic (PLEG): container finished" podID="b4d9e6f3-178e-4c47-852f-07f06b90fcd8" containerID="417e20f318c7223002c8506ef1e05a2e9f4ad18f43be472aeaf9042cead936aa" exitCode=0 Sep 29 22:12:22 crc kubenswrapper[4911]: I0929 22:12:22.692249 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/crc-debug-qm7tp" event={"ID":"b4d9e6f3-178e-4c47-852f-07f06b90fcd8","Type":"ContainerDied","Data":"417e20f318c7223002c8506ef1e05a2e9f4ad18f43be472aeaf9042cead936aa"} Sep 29 22:12:22 crc kubenswrapper[4911]: I0929 22:12:22.752671 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nc626/crc-debug-qm7tp"] Sep 29 22:12:22 crc kubenswrapper[4911]: I0929 22:12:22.760770 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nc626/crc-debug-qm7tp"] Sep 29 22:12:23 crc kubenswrapper[4911]: I0929 22:12:23.811030 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nc626/crc-debug-qm7tp" Sep 29 22:12:23 crc kubenswrapper[4911]: I0929 22:12:23.920728 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b4d9e6f3-178e-4c47-852f-07f06b90fcd8-host\") pod \"b4d9e6f3-178e-4c47-852f-07f06b90fcd8\" (UID: \"b4d9e6f3-178e-4c47-852f-07f06b90fcd8\") " Sep 29 22:12:23 crc kubenswrapper[4911]: I0929 22:12:23.920965 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jq8dk\" (UniqueName: \"kubernetes.io/projected/b4d9e6f3-178e-4c47-852f-07f06b90fcd8-kube-api-access-jq8dk\") pod \"b4d9e6f3-178e-4c47-852f-07f06b90fcd8\" (UID: \"b4d9e6f3-178e-4c47-852f-07f06b90fcd8\") " Sep 29 22:12:23 crc kubenswrapper[4911]: I0929 22:12:23.921985 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b4d9e6f3-178e-4c47-852f-07f06b90fcd8-host" (OuterVolumeSpecName: "host") pod "b4d9e6f3-178e-4c47-852f-07f06b90fcd8" (UID: "b4d9e6f3-178e-4c47-852f-07f06b90fcd8"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:12:23 crc kubenswrapper[4911]: I0929 22:12:23.947978 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4d9e6f3-178e-4c47-852f-07f06b90fcd8-kube-api-access-jq8dk" (OuterVolumeSpecName: "kube-api-access-jq8dk") pod "b4d9e6f3-178e-4c47-852f-07f06b90fcd8" (UID: "b4d9e6f3-178e-4c47-852f-07f06b90fcd8"). InnerVolumeSpecName "kube-api-access-jq8dk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:12:24 crc kubenswrapper[4911]: I0929 22:12:24.023047 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jq8dk\" (UniqueName: \"kubernetes.io/projected/b4d9e6f3-178e-4c47-852f-07f06b90fcd8-kube-api-access-jq8dk\") on node \"crc\" DevicePath \"\"" Sep 29 22:12:24 crc kubenswrapper[4911]: I0929 22:12:24.023084 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b4d9e6f3-178e-4c47-852f-07f06b90fcd8-host\") on node \"crc\" DevicePath \"\"" Sep 29 22:12:24 crc kubenswrapper[4911]: I0929 22:12:24.500234 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789_fba66f38-d6ca-4922-a098-30d733edfdc1/util/0.log" Sep 29 22:12:24 crc kubenswrapper[4911]: I0929 22:12:24.680126 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789_fba66f38-d6ca-4922-a098-30d733edfdc1/util/0.log" Sep 29 22:12:24 crc kubenswrapper[4911]: I0929 22:12:24.719336 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nc626/crc-debug-qm7tp" Sep 29 22:12:24 crc kubenswrapper[4911]: I0929 22:12:24.722586 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4d9e6f3-178e-4c47-852f-07f06b90fcd8" path="/var/lib/kubelet/pods/b4d9e6f3-178e-4c47-852f-07f06b90fcd8/volumes" Sep 29 22:12:24 crc kubenswrapper[4911]: I0929 22:12:24.723147 4911 scope.go:117] "RemoveContainer" containerID="417e20f318c7223002c8506ef1e05a2e9f4ad18f43be472aeaf9042cead936aa" Sep 29 22:12:24 crc kubenswrapper[4911]: I0929 22:12:24.752756 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789_fba66f38-d6ca-4922-a098-30d733edfdc1/pull/0.log" Sep 29 22:12:24 crc kubenswrapper[4911]: I0929 22:12:24.762687 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789_fba66f38-d6ca-4922-a098-30d733edfdc1/pull/0.log" Sep 29 22:12:24 crc kubenswrapper[4911]: I0929 22:12:24.942409 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789_fba66f38-d6ca-4922-a098-30d733edfdc1/extract/0.log" Sep 29 22:12:24 crc kubenswrapper[4911]: I0929 22:12:24.943085 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789_fba66f38-d6ca-4922-a098-30d733edfdc1/pull/0.log" Sep 29 22:12:24 crc kubenswrapper[4911]: I0929 22:12:24.954936 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0a88257fefbb0eb964355b1a6a6c00b1c079325d09c0c163900fee4756sb789_fba66f38-d6ca-4922-a098-30d733edfdc1/util/0.log" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.108692 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-82p2v_a9ab461d-94d1-487f-9854-a5e7e80f88ed/kube-rbac-proxy/0.log" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.131490 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-96r7j_0b825f85-6069-4d69-b1a8-9404542556cb/kube-rbac-proxy/0.log" 
Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.208221 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-82p2v_a9ab461d-94d1-487f-9854-a5e7e80f88ed/manager/0.log" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.213249 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.213447 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.303348 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-96r7j_0b825f85-6069-4d69-b1a8-9404542556cb/manager/0.log" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.333334 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-hm6c8_97810e9c-ad62-45d0-a644-1362696f0087/kube-rbac-proxy/0.log" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.402284 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-hm6c8_97810e9c-ad62-45d0-a644-1362696f0087/manager/0.log" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.532440 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-b8mfd_7b9b9966-b82d-481d-9ca9-062a883ffd1c/manager/0.log" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.545697 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-b8mfd_7b9b9966-b82d-481d-9ca9-062a883ffd1c/kube-rbac-proxy/0.log" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.674325 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-ssgn9_10f051aa-3e26-4a9b-89a1-d5bd8e58ba16/kube-rbac-proxy/0.log" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.723693 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-ssgn9_10f051aa-3e26-4a9b-89a1-d5bd8e58ba16/manager/0.log" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.863498 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-fgfrh_571a100e-c479-4a69-a8e3-8c7b1abe1bc5/kube-rbac-proxy/0.log" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.868868 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-fgfrh_571a100e-c479-4a69-a8e3-8c7b1abe1bc5/manager/0.log" Sep 29 22:12:25 crc kubenswrapper[4911]: I0929 22:12:25.917358 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-ps6zb_3dff6b09-12d0-462d-8558-175673f2ee0e/kube-rbac-proxy/0.log" Sep 
29 22:12:26 crc kubenswrapper[4911]: I0929 22:12:26.104784 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-2rlqk_c30e2b8c-3f72-4e6f-a5e4-ca27d53f5194/kube-rbac-proxy/0.log" Sep 29 22:12:26 crc kubenswrapper[4911]: I0929 22:12:26.155665 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-2rlqk_c30e2b8c-3f72-4e6f-a5e4-ca27d53f5194/manager/0.log" Sep 29 22:12:26 crc kubenswrapper[4911]: I0929 22:12:26.187496 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-ps6zb_3dff6b09-12d0-462d-8558-175673f2ee0e/manager/0.log" Sep 29 22:12:26 crc kubenswrapper[4911]: I0929 22:12:26.317857 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-cnn7f_0ffadab1-e334-4ec5-8e50-2a589230e880/kube-rbac-proxy/0.log" Sep 29 22:12:26 crc kubenswrapper[4911]: I0929 22:12:26.416187 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-cnn7f_0ffadab1-e334-4ec5-8e50-2a589230e880/manager/0.log" Sep 29 22:12:26 crc kubenswrapper[4911]: I0929 22:12:26.481509 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-l6zpg_73f9cfcd-2c46-4070-9734-4ec07c824a9f/manager/0.log" Sep 29 22:12:26 crc kubenswrapper[4911]: I0929 22:12:26.506960 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-l6zpg_73f9cfcd-2c46-4070-9734-4ec07c824a9f/kube-rbac-proxy/0.log" Sep 29 22:12:26 crc kubenswrapper[4911]: I0929 22:12:26.608119 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-zrp9j_66254365-9a3e-4101-beff-6bcbdfe57222/kube-rbac-proxy/0.log" Sep 29 22:12:26 crc kubenswrapper[4911]: I0929 22:12:26.681695 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-zrp9j_66254365-9a3e-4101-beff-6bcbdfe57222/manager/0.log" Sep 29 22:12:26 crc kubenswrapper[4911]: I0929 22:12:26.752097 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-dj2hs_89b4d71d-9da4-43af-a8a9-54c89c771c22/kube-rbac-proxy/0.log" Sep 29 22:12:26 crc kubenswrapper[4911]: I0929 22:12:26.807385 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-dj2hs_89b4d71d-9da4-43af-a8a9-54c89c771c22/manager/0.log" Sep 29 22:12:26 crc kubenswrapper[4911]: I0929 22:12:26.889881 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-vdx55_09452e7c-7e3f-4ca5-ae5c-c321e2a581ee/kube-rbac-proxy/0.log" Sep 29 22:12:27 crc kubenswrapper[4911]: I0929 22:12:27.011733 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-vdx55_09452e7c-7e3f-4ca5-ae5c-c321e2a581ee/manager/0.log" Sep 29 22:12:27 crc kubenswrapper[4911]: I0929 22:12:27.067578 4911 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-lvplw_afd159c8-b67d-46c6-8417-16f505314359/kube-rbac-proxy/0.log" Sep 29 22:12:27 crc kubenswrapper[4911]: I0929 22:12:27.075438 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-lvplw_afd159c8-b67d-46c6-8417-16f505314359/manager/0.log" Sep 29 22:12:27 crc kubenswrapper[4911]: I0929 22:12:27.176074 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-j9x29_e6607b81-fac2-4fb0-a19c-a4b01eef9fd2/kube-rbac-proxy/0.log" Sep 29 22:12:27 crc kubenswrapper[4911]: I0929 22:12:27.247261 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-j9x29_e6607b81-fac2-4fb0-a19c-a4b01eef9fd2/manager/0.log" Sep 29 22:12:27 crc kubenswrapper[4911]: I0929 22:12:27.341765 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7f894b9c96-4p2kz_2eba2279-aec7-4ad5-83d7-2ffd190b17e6/kube-rbac-proxy/0.log" Sep 29 22:12:27 crc kubenswrapper[4911]: I0929 22:12:27.468293 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-55fbd568cd-t2lpx_c1c21e4d-dd2a-440b-8f66-330667131f49/kube-rbac-proxy/0.log" Sep 29 22:12:27 crc kubenswrapper[4911]: I0929 22:12:27.699758 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-55fbd568cd-t2lpx_c1c21e4d-dd2a-440b-8f66-330667131f49/operator/0.log" Sep 29 22:12:27 crc kubenswrapper[4911]: I0929 22:12:27.708547 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-vcm56_c1e4af9e-39a5-4de6-9c6f-8a131757c680/registry-server/0.log" Sep 29 22:12:27 crc kubenswrapper[4911]: I0929 22:12:27.960065 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-dbm6k_a9b7bc05-a0dc-421b-9e13-b00f3b8759f2/kube-rbac-proxy/0.log" Sep 29 22:12:27 crc kubenswrapper[4911]: I0929 22:12:27.988472 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-dbm6k_a9b7bc05-a0dc-421b-9e13-b00f3b8759f2/manager/0.log" Sep 29 22:12:27 crc kubenswrapper[4911]: I0929 22:12:27.995181 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-zh4ln_9049bfc1-ea91-4483-9f36-3ffaa5f250c7/kube-rbac-proxy/0.log" Sep 29 22:12:28 crc kubenswrapper[4911]: I0929 22:12:28.185064 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-zh4ln_9049bfc1-ea91-4483-9f36-3ffaa5f250c7/manager/0.log" Sep 29 22:12:28 crc kubenswrapper[4911]: I0929 22:12:28.203498 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-nh8dq_96036e4c-a554-4d6c-8cd8-ef098c91f3a5/operator/0.log" Sep 29 22:12:28 crc kubenswrapper[4911]: I0929 22:12:28.340378 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-k6296_f86ad598-06ae-4ff1-90fc-1770d3b9797c/kube-rbac-proxy/0.log" Sep 29 22:12:28 crc kubenswrapper[4911]: I0929 22:12:28.372816 4911 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7f894b9c96-4p2kz_2eba2279-aec7-4ad5-83d7-2ffd190b17e6/manager/0.log" Sep 29 22:12:28 crc kubenswrapper[4911]: I0929 22:12:28.414097 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-k6296_f86ad598-06ae-4ff1-90fc-1770d3b9797c/manager/0.log" Sep 29 22:12:28 crc kubenswrapper[4911]: I0929 22:12:28.429911 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-66c64d68d6-mn744_e6468afc-47e4-4281-9049-9209a4eb8d73/kube-rbac-proxy/0.log" Sep 29 22:12:28 crc kubenswrapper[4911]: I0929 22:12:28.576226 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-66c64d68d6-mn744_e6468afc-47e4-4281-9049-9209a4eb8d73/manager/0.log" Sep 29 22:12:28 crc kubenswrapper[4911]: I0929 22:12:28.583228 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-d6xcc_95ec5351-b5ce-417e-ba1c-dcd76592fa6b/kube-rbac-proxy/0.log" Sep 29 22:12:28 crc kubenswrapper[4911]: I0929 22:12:28.590003 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-d6xcc_95ec5351-b5ce-417e-ba1c-dcd76592fa6b/manager/0.log" Sep 29 22:12:28 crc kubenswrapper[4911]: I0929 22:12:28.712979 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-2gcq4_4365babc-aa2d-4609-880b-f036dc6c367b/kube-rbac-proxy/0.log" Sep 29 22:12:28 crc kubenswrapper[4911]: I0929 22:12:28.742663 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-2gcq4_4365babc-aa2d-4609-880b-f036dc6c367b/manager/0.log" Sep 29 22:12:43 crc kubenswrapper[4911]: I0929 22:12:43.689229 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-bgnq2_36aba054-4229-40fb-8fd4-344cd9f61a40/control-plane-machine-set-operator/0.log" Sep 29 22:12:43 crc kubenswrapper[4911]: I0929 22:12:43.858615 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2flr8_d55a1ae9-2e28-49f6-904b-67a246fda7e6/kube-rbac-proxy/0.log" Sep 29 22:12:43 crc kubenswrapper[4911]: I0929 22:12:43.915329 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2flr8_d55a1ae9-2e28-49f6-904b-67a246fda7e6/machine-api-operator/0.log" Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 22:12:53.743521 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cmbsm"] Sep 29 22:12:53 crc kubenswrapper[4911]: E0929 22:12:53.744771 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4d9e6f3-178e-4c47-852f-07f06b90fcd8" containerName="container-00" Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 22:12:53.744818 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4d9e6f3-178e-4c47-852f-07f06b90fcd8" containerName="container-00" Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 22:12:53.745111 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4d9e6f3-178e-4c47-852f-07f06b90fcd8" containerName="container-00" Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 
22:12:53.747304 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cmbsm" Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 22:12:53.763043 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cmbsm"] Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 22:12:53.884592 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjfjj\" (UniqueName: \"kubernetes.io/projected/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-kube-api-access-rjfjj\") pod \"community-operators-cmbsm\" (UID: \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\") " pod="openshift-marketplace/community-operators-cmbsm" Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 22:12:53.884731 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-utilities\") pod \"community-operators-cmbsm\" (UID: \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\") " pod="openshift-marketplace/community-operators-cmbsm" Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 22:12:53.884782 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-catalog-content\") pod \"community-operators-cmbsm\" (UID: \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\") " pod="openshift-marketplace/community-operators-cmbsm" Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 22:12:53.986187 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjfjj\" (UniqueName: \"kubernetes.io/projected/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-kube-api-access-rjfjj\") pod \"community-operators-cmbsm\" (UID: \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\") " pod="openshift-marketplace/community-operators-cmbsm" Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 22:12:53.986273 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-utilities\") pod \"community-operators-cmbsm\" (UID: \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\") " pod="openshift-marketplace/community-operators-cmbsm" Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 22:12:53.986306 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-catalog-content\") pod \"community-operators-cmbsm\" (UID: \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\") " pod="openshift-marketplace/community-operators-cmbsm" Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 22:12:53.986778 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-catalog-content\") pod \"community-operators-cmbsm\" (UID: \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\") " pod="openshift-marketplace/community-operators-cmbsm" Sep 29 22:12:53 crc kubenswrapper[4911]: I0929 22:12:53.986931 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-utilities\") pod \"community-operators-cmbsm\" (UID: \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\") " pod="openshift-marketplace/community-operators-cmbsm" Sep 29 22:12:54 crc 
kubenswrapper[4911]: I0929 22:12:54.004316 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjfjj\" (UniqueName: \"kubernetes.io/projected/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-kube-api-access-rjfjj\") pod \"community-operators-cmbsm\" (UID: \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\") " pod="openshift-marketplace/community-operators-cmbsm"
Sep 29 22:12:54 crc kubenswrapper[4911]: I0929 22:12:54.112053 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cmbsm"
Sep 29 22:12:54 crc kubenswrapper[4911]: I0929 22:12:54.639394 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cmbsm"]
Sep 29 22:12:55 crc kubenswrapper[4911]: I0929 22:12:55.004223 4911 generic.go:334] "Generic (PLEG): container finished" podID="ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" containerID="6cfc8fe2e6f5642c863afceff3d5dabf8439176dd542be715766631bf0880ba2" exitCode=0
Sep 29 22:12:55 crc kubenswrapper[4911]: I0929 22:12:55.006069 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cmbsm" event={"ID":"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc","Type":"ContainerDied","Data":"6cfc8fe2e6f5642c863afceff3d5dabf8439176dd542be715766631bf0880ba2"}
Sep 29 22:12:55 crc kubenswrapper[4911]: I0929 22:12:55.006097 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cmbsm" event={"ID":"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc","Type":"ContainerStarted","Data":"be81cd78eba067074e28cac97d2da36e516f9d0f33455d12d8bbaeec6f10fc3c"}
Sep 29 22:12:55 crc kubenswrapper[4911]: I0929 22:12:55.007540 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 22:12:55 crc kubenswrapper[4911]: I0929 22:12:55.210889 4911 patch_prober.go:28] interesting pod/machine-config-daemon-w647f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 22:12:55 crc kubenswrapper[4911]: I0929 22:12:55.210950 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 22:12:55 crc kubenswrapper[4911]: I0929 22:12:55.210995 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w647f"
Sep 29 22:12:55 crc kubenswrapper[4911]: I0929 22:12:55.211854 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"} pod="openshift-machine-config-operator/machine-config-daemon-w647f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 22:12:55 crc kubenswrapper[4911]: I0929 22:12:55.211913 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" containerName="machine-config-daemon" containerID="cri-o://fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f" gracePeriod=600
Sep 29 22:12:55 crc kubenswrapper[4911]: I0929 22:12:55.325248 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-7sbqq_e997c16b-21df-4c5b-89e3-f45fec29191e/cert-manager-controller/0.log"
Sep 29 22:12:55 crc kubenswrapper[4911]: E0929 22:12:55.353019 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:12:55 crc kubenswrapper[4911]: I0929 22:12:55.492265 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-vlpxx_77a3df58-e35e-45ad-a5bc-6fb3841ec955/cert-manager-cainjector/0.log"
Sep 29 22:12:55 crc kubenswrapper[4911]: I0929 22:12:55.537169 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-sglg6_ee5b7990-71bd-4b37-8fa1-aaa3b1284320/cert-manager-webhook/0.log"
Sep 29 22:12:56 crc kubenswrapper[4911]: I0929 22:12:56.016526 4911 generic.go:334] "Generic (PLEG): container finished" podID="50640abc-40db-4390-82d1-f3cfc76da71c" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f" exitCode=0
Sep 29 22:12:56 crc kubenswrapper[4911]: I0929 22:12:56.016635 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerDied","Data":"fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"}
Sep 29 22:12:56 crc kubenswrapper[4911]: I0929 22:12:56.016878 4911 scope.go:117] "RemoveContainer" containerID="7be79346af5955e6a8de71a5a3427d7157502674786602aada446da1543ebd80"
Sep 29 22:12:56 crc kubenswrapper[4911]: I0929 22:12:56.017554 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:12:56 crc kubenswrapper[4911]: E0929 22:12:56.017884 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:12:56 crc kubenswrapper[4911]: I0929 22:12:56.019257 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cmbsm" event={"ID":"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc","Type":"ContainerStarted","Data":"0d5d7ec2f8cd0ea1efd2b7323c54fa8f819a024278f22edaa75b79f8fb1f0160"}
Sep 29 22:12:57 crc kubenswrapper[4911]: I0929 22:12:57.034441 4911 generic.go:334] "Generic (PLEG): container finished" podID="ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" containerID="0d5d7ec2f8cd0ea1efd2b7323c54fa8f819a024278f22edaa75b79f8fb1f0160" exitCode=0
Sep 29 22:12:57 crc kubenswrapper[4911]: I0929 22:12:57.034488 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cmbsm" event={"ID":"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc","Type":"ContainerDied","Data":"0d5d7ec2f8cd0ea1efd2b7323c54fa8f819a024278f22edaa75b79f8fb1f0160"}
Sep 29 22:12:58 crc kubenswrapper[4911]: I0929 22:12:58.050207 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cmbsm" event={"ID":"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc","Type":"ContainerStarted","Data":"653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff"}
Sep 29 22:12:58 crc kubenswrapper[4911]: I0929 22:12:58.074150 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cmbsm" podStartSLOduration=2.567995745 podStartE2EDuration="5.074133554s" podCreationTimestamp="2025-09-29 22:12:53 +0000 UTC" firstStartedPulling="2025-09-29 22:12:55.007354091 +0000 UTC m=+2852.984466762" lastFinishedPulling="2025-09-29 22:12:57.5134919 +0000 UTC m=+2855.490604571" observedRunningTime="2025-09-29 22:12:58.072118951 +0000 UTC m=+2856.049231622" watchObservedRunningTime="2025-09-29 22:12:58.074133554 +0000 UTC m=+2856.051246225"
Sep 29 22:13:04 crc kubenswrapper[4911]: I0929 22:13:04.123079 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cmbsm"
Sep 29 22:13:04 crc kubenswrapper[4911]: I0929 22:13:04.123654 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cmbsm"
Sep 29 22:13:04 crc kubenswrapper[4911]: I0929 22:13:04.195902 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cmbsm"
Sep 29 22:13:05 crc kubenswrapper[4911]: I0929 22:13:05.174490 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cmbsm"
Sep 29 22:13:05 crc kubenswrapper[4911]: I0929 22:13:05.228178 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cmbsm"]
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.144653 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cmbsm" podUID="ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" containerName="registry-server" containerID="cri-o://653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff" gracePeriod=2
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.174847 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-rqglt_603ee12c-cc36-4dd0-af9b-efa00c50712b/nmstate-console-plugin/0.log"
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.323178 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-m57sm_85f29bf7-4f84-4317-ac7e-b8724401c99f/nmstate-handler/0.log"
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.334661 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-jmsmg_0ae2d175-d9c4-4a66-9747-fd82fef23890/kube-rbac-proxy/0.log"
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.403336 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-jmsmg_0ae2d175-d9c4-4a66-9747-fd82fef23890/nmstate-metrics/0.log"
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.541622 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-rzfn8_46d5f12f-493e-4199-a9ce-649275408eff/nmstate-operator/0.log"
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.615230 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cmbsm"
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.643142 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjfjj\" (UniqueName: \"kubernetes.io/projected/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-kube-api-access-rjfjj\") pod \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\" (UID: \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\") "
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.643282 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-utilities\") pod \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\" (UID: \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\") "
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.643337 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-catalog-content\") pod \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\" (UID: \"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc\") "
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.644212 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-utilities" (OuterVolumeSpecName: "utilities") pod "ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" (UID: "ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.649430 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-kube-api-access-rjfjj" (OuterVolumeSpecName: "kube-api-access-rjfjj") pod "ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" (UID: "ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc"). InnerVolumeSpecName "kube-api-access-rjfjj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.657324 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-qdgf7_734625b7-c050-470e-92b7-6a4ab5de695e/nmstate-webhook/0.log"
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.696277 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" (UID: "ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.745654 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjfjj\" (UniqueName: \"kubernetes.io/projected/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-kube-api-access-rjfjj\") on node \"crc\" DevicePath \"\""
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.745692 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 22:13:07 crc kubenswrapper[4911]: I0929 22:13:07.745705 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.166837 4911 generic.go:334] "Generic (PLEG): container finished" podID="ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" containerID="653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff" exitCode=0
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.166895 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cmbsm" event={"ID":"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc","Type":"ContainerDied","Data":"653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff"}
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.166951 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cmbsm" event={"ID":"ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc","Type":"ContainerDied","Data":"be81cd78eba067074e28cac97d2da36e516f9d0f33455d12d8bbaeec6f10fc3c"}
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.166974 4911 scope.go:117] "RemoveContainer" containerID="653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff"
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.166917 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cmbsm"
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.199314 4911 scope.go:117] "RemoveContainer" containerID="0d5d7ec2f8cd0ea1efd2b7323c54fa8f819a024278f22edaa75b79f8fb1f0160"
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.205251 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cmbsm"]
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.213952 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cmbsm"]
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.254321 4911 scope.go:117] "RemoveContainer" containerID="6cfc8fe2e6f5642c863afceff3d5dabf8439176dd542be715766631bf0880ba2"
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.280662 4911 scope.go:117] "RemoveContainer" containerID="653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff"
Sep 29 22:13:08 crc kubenswrapper[4911]: E0929 22:13:08.281192 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff\": container with ID starting with 653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff not found: ID does not exist" containerID="653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff"
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.281237 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff"} err="failed to get container status \"653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff\": rpc error: code = NotFound desc = could not find container \"653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff\": container with ID starting with 653d3ce894aeece16cae8c48ecdea2363ea0b3e2d54d830187a067ebf4d4a9ff not found: ID does not exist"
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.281263 4911 scope.go:117] "RemoveContainer" containerID="0d5d7ec2f8cd0ea1efd2b7323c54fa8f819a024278f22edaa75b79f8fb1f0160"
Sep 29 22:13:08 crc kubenswrapper[4911]: E0929 22:13:08.281948 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d5d7ec2f8cd0ea1efd2b7323c54fa8f819a024278f22edaa75b79f8fb1f0160\": container with ID starting with 0d5d7ec2f8cd0ea1efd2b7323c54fa8f819a024278f22edaa75b79f8fb1f0160 not found: ID does not exist" containerID="0d5d7ec2f8cd0ea1efd2b7323c54fa8f819a024278f22edaa75b79f8fb1f0160"
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.281990 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d5d7ec2f8cd0ea1efd2b7323c54fa8f819a024278f22edaa75b79f8fb1f0160"} err="failed to get container status \"0d5d7ec2f8cd0ea1efd2b7323c54fa8f819a024278f22edaa75b79f8fb1f0160\": rpc error: code = NotFound desc = could not find container \"0d5d7ec2f8cd0ea1efd2b7323c54fa8f819a024278f22edaa75b79f8fb1f0160\": container with ID starting with 0d5d7ec2f8cd0ea1efd2b7323c54fa8f819a024278f22edaa75b79f8fb1f0160 not found: ID does not exist"
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.282016 4911 scope.go:117] "RemoveContainer" containerID="6cfc8fe2e6f5642c863afceff3d5dabf8439176dd542be715766631bf0880ba2"
Sep 29 22:13:08 crc kubenswrapper[4911]: E0929 22:13:08.282413 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cfc8fe2e6f5642c863afceff3d5dabf8439176dd542be715766631bf0880ba2\": container with ID starting with 6cfc8fe2e6f5642c863afceff3d5dabf8439176dd542be715766631bf0880ba2 not found: ID does not exist" containerID="6cfc8fe2e6f5642c863afceff3d5dabf8439176dd542be715766631bf0880ba2"
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.282436 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cfc8fe2e6f5642c863afceff3d5dabf8439176dd542be715766631bf0880ba2"} err="failed to get container status \"6cfc8fe2e6f5642c863afceff3d5dabf8439176dd542be715766631bf0880ba2\": rpc error: code = NotFound desc = could not find container \"6cfc8fe2e6f5642c863afceff3d5dabf8439176dd542be715766631bf0880ba2\": container with ID starting with 6cfc8fe2e6f5642c863afceff3d5dabf8439176dd542be715766631bf0880ba2 not found: ID does not exist"
Sep 29 22:13:08 crc kubenswrapper[4911]: I0929 22:13:08.715994 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" path="/var/lib/kubelet/pods/ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc/volumes"
Sep 29 22:13:10 crc kubenswrapper[4911]: I0929 22:13:10.701422 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:13:10 crc kubenswrapper[4911]: E0929 22:13:10.701920 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:13:19 crc kubenswrapper[4911]: I0929 22:13:19.425068 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7797b598cb-5xvfp_3ded07bd-2737-4d5d-8265-6c2e38f653d8/manager/0.log"
Sep 29 22:13:19 crc kubenswrapper[4911]: I0929 22:13:19.441710 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7797b598cb-5xvfp_3ded07bd-2737-4d5d-8265-6c2e38f653d8/kube-rbac-proxy/0.log"
Sep 29 22:13:22 crc kubenswrapper[4911]: I0929 22:13:22.722063 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:13:22 crc kubenswrapper[4911]: E0929 22:13:22.722666 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:13:31 crc kubenswrapper[4911]: I0929 22:13:31.552013 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_cluster-logging-operator-fcc886d58-nw5qw_ba2c473e-d26f-435d-b673-24026f131de8/cluster-logging-operator/0.log"
Sep 29 22:13:31 crc kubenswrapper[4911]: I0929 22:13:31.742127 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_collector-7kzkd_c102400c-fbea-45f2-a201-f7bae363f052/collector/0.log"
Sep 29 22:13:37 crc kubenswrapper[4911]: I0929 22:13:37.701825 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:13:37 crc kubenswrapper[4911]: E0929 22:13:37.702646 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.084908 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-xxmg2_fef69310-fa8e-4fa6-b35c-1347023377d8/kube-rbac-proxy/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.130460 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-xxmg2_fef69310-fa8e-4fa6-b35c-1347023377d8/controller/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.267626 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/cp-frr-files/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.457348 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/cp-reloader/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.471754 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/cp-reloader/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.490955 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/cp-frr-files/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.495980 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/cp-metrics/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.627031 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/cp-frr-files/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.657852 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/cp-reloader/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.687760 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/cp-metrics/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.688086 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/cp-metrics/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.849654 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/cp-frr-files/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.878418 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/cp-reloader/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.897809 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/cp-metrics/0.log"
Sep 29 22:13:43 crc kubenswrapper[4911]: I0929 22:13:43.918014 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/controller/0.log"
Sep 29 22:13:44 crc kubenswrapper[4911]: I0929 22:13:44.080028 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/frr-metrics/0.log"
Sep 29 22:13:44 crc kubenswrapper[4911]: I0929 22:13:44.113906 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/kube-rbac-proxy/0.log"
Sep 29 22:13:44 crc kubenswrapper[4911]: I0929 22:13:44.145869 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/kube-rbac-proxy-frr/0.log"
Sep 29 22:13:44 crc kubenswrapper[4911]: I0929 22:13:44.346366 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/reloader/0.log"
Sep 29 22:13:44 crc kubenswrapper[4911]: I0929 22:13:44.396314 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-5ztnx_000d716a-9bed-4422-8a16-8598ff854239/frr-k8s-webhook-server/0.log"
Sep 29 22:13:44 crc kubenswrapper[4911]: I0929 22:13:44.638206 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5bf9d4f487-rzrcz_305ebfd8-5281-4a0f-9f5d-57db4028fa54/manager/0.log"
Sep 29 22:13:44 crc kubenswrapper[4911]: I0929 22:13:44.766275 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6654f57795-4tpgk_72686e5d-383c-4860-bc1a-fb31a11f900d/webhook-server/0.log"
Sep 29 22:13:44 crc kubenswrapper[4911]: I0929 22:13:44.868980 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-jxhnw_a428871b-77b8-46c9-8886-fa3eb5b2e108/kube-rbac-proxy/0.log"
Sep 29 22:13:45 crc kubenswrapper[4911]: I0929 22:13:45.274855 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-lj8ph_cb4339ee-098d-428e-89a8-d57aec12356c/frr/0.log"
Sep 29 22:13:45 crc kubenswrapper[4911]: I0929 22:13:45.310470 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-jxhnw_a428871b-77b8-46c9-8886-fa3eb5b2e108/speaker/0.log"
Sep 29 22:13:50 crc kubenswrapper[4911]: I0929 22:13:50.701164 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:13:50 crc kubenswrapper[4911]: E0929 22:13:50.702248 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:13:56 crc kubenswrapper[4911]: I0929 22:13:56.958984 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp_135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3/util/0.log"
Sep 29 22:13:57 crc kubenswrapper[4911]: I0929 22:13:57.172896 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp_135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3/util/0.log"
Sep 29 22:13:57 crc kubenswrapper[4911]: I0929 22:13:57.197399 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp_135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3/pull/0.log"
Sep 29 22:13:57 crc kubenswrapper[4911]: I0929 22:13:57.202800 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp_135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3/pull/0.log"
Sep 29 22:13:57 crc kubenswrapper[4911]: I0929 22:13:57.363925 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp_135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3/pull/0.log"
Sep 29 22:13:57 crc kubenswrapper[4911]: I0929 22:13:57.425324 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp_135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3/extract/0.log"
Sep 29 22:13:57 crc kubenswrapper[4911]: I0929 22:13:57.427396 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694dj5gp_135c9e3e-6b82-4a96-8684-ba2d8b9fe7f3/util/0.log"
Sep 29 22:13:57 crc kubenswrapper[4911]: I0929 22:13:57.531232 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr_629693d2-2856-4eb7-9abc-dbee25234329/util/0.log"
Sep 29 22:13:57 crc kubenswrapper[4911]: I0929 22:13:57.976991 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr_629693d2-2856-4eb7-9abc-dbee25234329/pull/0.log"
Sep 29 22:13:57 crc kubenswrapper[4911]: I0929 22:13:57.984202 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr_629693d2-2856-4eb7-9abc-dbee25234329/util/0.log"
Sep 29 22:13:58 crc kubenswrapper[4911]: I0929 22:13:58.029565 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr_629693d2-2856-4eb7-9abc-dbee25234329/pull/0.log"
Sep 29 22:13:58 crc kubenswrapper[4911]: I0929 22:13:58.165293 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr_629693d2-2856-4eb7-9abc-dbee25234329/extract/0.log"
Sep 29 22:13:58 crc kubenswrapper[4911]: I0929 22:13:58.223555 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr_629693d2-2856-4eb7-9abc-dbee25234329/pull/0.log"
Sep 29 22:13:58 crc kubenswrapper[4911]: I0929 22:13:58.262215 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc28wnr_629693d2-2856-4eb7-9abc-dbee25234329/util/0.log"
Sep 29 22:13:58 crc kubenswrapper[4911]: I0929 22:13:58.372261 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj_aa9a2254-fecc-4d77-bd00-c665acb7efe2/util/0.log"
Sep 29 22:13:58 crc kubenswrapper[4911]: I0929 22:13:58.532150 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj_aa9a2254-fecc-4d77-bd00-c665acb7efe2/pull/0.log"
Sep 29 22:13:58 crc kubenswrapper[4911]: I0929 22:13:58.535757 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj_aa9a2254-fecc-4d77-bd00-c665acb7efe2/util/0.log"
Sep 29 22:13:58 crc kubenswrapper[4911]: I0929 22:13:58.574433 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj_aa9a2254-fecc-4d77-bd00-c665acb7efe2/pull/0.log"
Sep 29 22:13:58 crc kubenswrapper[4911]: I0929 22:13:58.732438 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj_aa9a2254-fecc-4d77-bd00-c665acb7efe2/pull/0.log"
Sep 29 22:13:58 crc kubenswrapper[4911]: I0929 22:13:58.742385 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj_aa9a2254-fecc-4d77-bd00-c665acb7efe2/extract/0.log"
Sep 29 22:13:58 crc kubenswrapper[4911]: I0929 22:13:58.750984 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d7cbwj_aa9a2254-fecc-4d77-bd00-c665acb7efe2/util/0.log"
Sep 29 22:13:58 crc kubenswrapper[4911]: I0929 22:13:58.933664 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_98ba0cf0-64a8-482d-af55-862d182fefdb/util/0.log"
Sep 29 22:13:59 crc kubenswrapper[4911]: I0929 22:13:59.112663 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_98ba0cf0-64a8-482d-af55-862d182fefdb/util/0.log"
Sep 29 22:13:59 crc kubenswrapper[4911]: I0929 22:13:59.148776 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_98ba0cf0-64a8-482d-af55-862d182fefdb/pull/0.log"
Sep 29 22:13:59 crc kubenswrapper[4911]: I0929 22:13:59.172576 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_98ba0cf0-64a8-482d-af55-862d182fefdb/pull/0.log"
Sep 29 22:13:59 crc kubenswrapper[4911]: I0929 22:13:59.346959 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_98ba0cf0-64a8-482d-af55-862d182fefdb/extract/0.log"
Sep 29 22:13:59 crc kubenswrapper[4911]: I0929 22:13:59.371025 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_98ba0cf0-64a8-482d-af55-862d182fefdb/pull/0.log"
Sep 29 22:13:59 crc kubenswrapper[4911]: I0929 22:13:59.417834 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c0frlgc_98ba0cf0-64a8-482d-af55-862d182fefdb/util/0.log"
Sep 29 22:13:59 crc kubenswrapper[4911]: I0929 22:13:59.526422 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-59bsf_053ef77e-10da-46b9-bc85-77d52f64b576/extract-utilities/0.log"
Sep 29 22:13:59 crc kubenswrapper[4911]: I0929 22:13:59.733346 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-59bsf_053ef77e-10da-46b9-bc85-77d52f64b576/extract-utilities/0.log"
Sep 29 22:13:59 crc kubenswrapper[4911]: I0929 22:13:59.733451 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-59bsf_053ef77e-10da-46b9-bc85-77d52f64b576/extract-content/0.log"
Sep 29 22:13:59 crc kubenswrapper[4911]: I0929 22:13:59.784275 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-59bsf_053ef77e-10da-46b9-bc85-77d52f64b576/extract-content/0.log"
Sep 29 22:13:59 crc kubenswrapper[4911]: I0929 22:13:59.949204 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-59bsf_053ef77e-10da-46b9-bc85-77d52f64b576/extract-utilities/0.log"
Sep 29 22:14:00 crc kubenswrapper[4911]: I0929 22:14:00.000227 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-59bsf_053ef77e-10da-46b9-bc85-77d52f64b576/extract-content/0.log"
Sep 29 22:14:00 crc kubenswrapper[4911]: I0929 22:14:00.131859 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wxv47_d2681ca4-c395-47c2-8145-a1da21e6f46f/extract-utilities/0.log"
Sep 29 22:14:00 crc kubenswrapper[4911]: I0929 22:14:00.383489 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wxv47_d2681ca4-c395-47c2-8145-a1da21e6f46f/extract-content/0.log"
Sep 29 22:14:00 crc kubenswrapper[4911]: I0929 22:14:00.394509 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wxv47_d2681ca4-c395-47c2-8145-a1da21e6f46f/extract-utilities/0.log"
Sep 29 22:14:00 crc kubenswrapper[4911]: I0929 22:14:00.400456 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-59bsf_053ef77e-10da-46b9-bc85-77d52f64b576/registry-server/0.log"
Sep 29 22:14:00 crc kubenswrapper[4911]: I0929 22:14:00.404434 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wxv47_d2681ca4-c395-47c2-8145-a1da21e6f46f/extract-content/0.log"
Sep 29 22:14:00 crc kubenswrapper[4911]: I0929 22:14:00.576279 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wxv47_d2681ca4-c395-47c2-8145-a1da21e6f46f/extract-utilities/0.log"
Sep 29 22:14:00 crc kubenswrapper[4911]: I0929 22:14:00.603343 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wxv47_d2681ca4-c395-47c2-8145-a1da21e6f46f/extract-content/0.log"
Sep 29 22:14:00 crc kubenswrapper[4911]: I0929 22:14:00.636230 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_e417e0ba-3b3f-4700-8921-345cc400b7ba/util/0.log"
Sep 29 22:14:00 crc kubenswrapper[4911]: I0929 22:14:00.904193 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_e417e0ba-3b3f-4700-8921-345cc400b7ba/pull/0.log"
Sep 29 22:14:00 crc kubenswrapper[4911]: I0929 22:14:00.917528 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_e417e0ba-3b3f-4700-8921-345cc400b7ba/pull/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.012892 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_e417e0ba-3b3f-4700-8921-345cc400b7ba/util/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.016911 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wxv47_d2681ca4-c395-47c2-8145-a1da21e6f46f/registry-server/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.171296 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_e417e0ba-3b3f-4700-8921-345cc400b7ba/pull/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.193742 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_e417e0ba-3b3f-4700-8921-345cc400b7ba/util/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.196091 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170vvfsx_e417e0ba-3b3f-4700-8921-345cc400b7ba/extract/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.272923 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr_d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7/util/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.363073 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr_d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7/util/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.471159 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr_d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7/pull/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.600241 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr_d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7/pull/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.736881 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr_d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7/pull/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.747685 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr_d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7/util/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.784440 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-pbd48_8089b532-3c10-498a-9558-7b5d845d6c7e/marketplace-operator/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.820649 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h4chr_d2367d33-fcb0-4433-a6cf-9dfc7e2b9bd7/extract/0.log"
Sep 29 22:14:01 crc kubenswrapper[4911]: I0929 22:14:01.900257 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rm59v_61bf2025-9b8f-4cbb-8667-c58e05bb8706/extract-utilities/0.log"
Sep 29 22:14:02 crc kubenswrapper[4911]: I0929 22:14:02.060124 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rm59v_61bf2025-9b8f-4cbb-8667-c58e05bb8706/extract-content/0.log"
Sep 29 22:14:02 crc kubenswrapper[4911]: I0929 22:14:02.067582 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rm59v_61bf2025-9b8f-4cbb-8667-c58e05bb8706/extract-utilities/0.log"
Sep 29 22:14:02 crc kubenswrapper[4911]: I0929 22:14:02.075866 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rm59v_61bf2025-9b8f-4cbb-8667-c58e05bb8706/extract-content/0.log"
Sep 29 22:14:02 crc kubenswrapper[4911]: I0929 22:14:02.243612 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rm59v_61bf2025-9b8f-4cbb-8667-c58e05bb8706/extract-content/0.log"
Sep 29 22:14:02 crc kubenswrapper[4911]: I0929 22:14:02.292425 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7578h_d3e76ef6-306e-4953-a379-367d277b9db4/extract-utilities/0.log"
Sep 29 22:14:02 crc kubenswrapper[4911]: I0929 22:14:02.293450 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rm59v_61bf2025-9b8f-4cbb-8667-c58e05bb8706/extract-utilities/0.log"
Sep 29 22:14:02 crc kubenswrapper[4911]: I0929 22:14:02.378301 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rm59v_61bf2025-9b8f-4cbb-8667-c58e05bb8706/registry-server/0.log"
Sep 29 22:14:02 crc kubenswrapper[4911]: I0929 22:14:02.505359 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7578h_d3e76ef6-306e-4953-a379-367d277b9db4/extract-content/0.log"
Sep 29 22:14:02 crc kubenswrapper[4911]: I0929 22:14:02.515385 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7578h_d3e76ef6-306e-4953-a379-367d277b9db4/extract-utilities/0.log"
Sep 29 22:14:02 crc kubenswrapper[4911]: I0929 22:14:02.520933 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7578h_d3e76ef6-306e-4953-a379-367d277b9db4/extract-content/0.log"
Sep 29 22:14:02 crc kubenswrapper[4911]: I0929 22:14:02.697930 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7578h_d3e76ef6-306e-4953-a379-367d277b9db4/extract-content/0.log"
Sep 29 22:14:02 crc kubenswrapper[4911]: I0929 22:14:02.722220 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7578h_d3e76ef6-306e-4953-a379-367d277b9db4/extract-utilities/0.log"
Sep 29 22:14:03 crc kubenswrapper[4911]: I0929 22:14:03.016843 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7578h_d3e76ef6-306e-4953-a379-367d277b9db4/registry-server/0.log"
Sep 29 22:14:04 crc kubenswrapper[4911]: I0929 22:14:04.704221 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:14:04 crc kubenswrapper[4911]: E0929 22:14:04.704666 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:14:14 crc kubenswrapper[4911]: I0929 22:14:14.950698 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-vg7l9_fec367b2-98aa-4597-b64f-b5bdc79b0663/prometheus-operator/0.log"
Sep 29 22:14:15 crc kubenswrapper[4911]: I0929 22:14:15.145605 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5958f48f7b-jrc7l_84376021-7e21-4053-ae59-3665a4c9c507/prometheus-operator-admission-webhook/0.log"
Sep 29 22:14:15 crc kubenswrapper[4911]: I0929 22:14:15.198585 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5958f48f7b-lprbm_ac733ce2-123b-4300-ae15-adffd62f927a/prometheus-operator-admission-webhook/0.log"
Sep 29 22:14:15 crc kubenswrapper[4911]: I0929 22:14:15.349022 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-75pqc_31c4b3c7-c9bd-4b98-bb0d-b2240bddaa12/operator/0.log"
Sep 29 22:14:15 crc kubenswrapper[4911]: I0929 22:14:15.375114 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-xvj9h_e8f2e1f7-e311-4126-853a-a85eac6e689c/perses-operator/0.log"
Sep 29 22:14:16 crc kubenswrapper[4911]: I0929 22:14:16.701473 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:14:16 crc kubenswrapper[4911]: E0929 22:14:16.701831 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:14:26 crc kubenswrapper[4911]: I0929 22:14:26.862355 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7797b598cb-5xvfp_3ded07bd-2737-4d5d-8265-6c2e38f653d8/kube-rbac-proxy/0.log"
Sep 29 22:14:26 crc kubenswrapper[4911]: I0929 22:14:26.864782 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7797b598cb-5xvfp_3ded07bd-2737-4d5d-8265-6c2e38f653d8/manager/0.log"
Sep 29 22:14:29 crc kubenswrapper[4911]: I0929 22:14:29.702172 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:14:29 crc kubenswrapper[4911]: E0929 22:14:29.702707 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:14:43 crc kubenswrapper[4911]: I0929 22:14:43.701785 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:14:43 crc kubenswrapper[4911]: E0929 22:14:43.702622 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:14:57 crc kubenswrapper[4911]: I0929 22:14:57.701946 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:14:57 crc kubenswrapper[4911]: E0929 22:14:57.703351 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.235268 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"]
Sep 29 22:15:00 crc kubenswrapper[4911]: E0929 22:15:00.236457 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" containerName="extract-content"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.236481 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" containerName="extract-content"
Sep 29 22:15:00 crc kubenswrapper[4911]: E0929 22:15:00.236538 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" containerName="extract-utilities"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.236552 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" containerName="extract-utilities"
Sep 29 22:15:00 crc kubenswrapper[4911]: E0929 22:15:00.236569 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" containerName="registry-server"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.236582 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" containerName="registry-server"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.237013 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae3c6a09-9b3d-42ea-bffb-dd34c90fe8dc" containerName="registry-server"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.238165 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.240965 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.240976 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.251363 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"]
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.367463 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9de4243a-5b4b-4e71-a504-28744cbbbd55-config-volume\") pod \"collect-profiles-29319735-mtqq5\" (UID: \"9de4243a-5b4b-4e71-a504-28744cbbbd55\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.367557 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9de4243a-5b4b-4e71-a504-28744cbbbd55-secret-volume\") pod \"collect-profiles-29319735-mtqq5\" (UID: \"9de4243a-5b4b-4e71-a504-28744cbbbd55\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.367670 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhnst\" (UniqueName: \"kubernetes.io/projected/9de4243a-5b4b-4e71-a504-28744cbbbd55-kube-api-access-vhnst\") pod \"collect-profiles-29319735-mtqq5\" (UID: \"9de4243a-5b4b-4e71-a504-28744cbbbd55\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.470609 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhnst\" (UniqueName: \"kubernetes.io/projected/9de4243a-5b4b-4e71-a504-28744cbbbd55-kube-api-access-vhnst\") pod \"collect-profiles-29319735-mtqq5\" (UID: \"9de4243a-5b4b-4e71-a504-28744cbbbd55\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.471089 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9de4243a-5b4b-4e71-a504-28744cbbbd55-config-volume\") pod \"collect-profiles-29319735-mtqq5\" (UID: \"9de4243a-5b4b-4e71-a504-28744cbbbd55\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.471391 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9de4243a-5b4b-4e71-a504-28744cbbbd55-secret-volume\") pod \"collect-profiles-29319735-mtqq5\" (UID: \"9de4243a-5b4b-4e71-a504-28744cbbbd55\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.471888 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9de4243a-5b4b-4e71-a504-28744cbbbd55-config-volume\") pod \"collect-profiles-29319735-mtqq5\" (UID: \"9de4243a-5b4b-4e71-a504-28744cbbbd55\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.478256 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9de4243a-5b4b-4e71-a504-28744cbbbd55-secret-volume\") pod \"collect-profiles-29319735-mtqq5\" (UID: \"9de4243a-5b4b-4e71-a504-28744cbbbd55\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.502646 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhnst\" (UniqueName: \"kubernetes.io/projected/9de4243a-5b4b-4e71-a504-28744cbbbd55-kube-api-access-vhnst\") pod \"collect-profiles-29319735-mtqq5\" (UID: \"9de4243a-5b4b-4e71-a504-28744cbbbd55\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:00 crc kubenswrapper[4911]: I0929 22:15:00.569399 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:01 crc kubenswrapper[4911]: I0929 22:15:01.016322 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"]
Sep 29 22:15:01 crc kubenswrapper[4911]: W0929 22:15:01.017990 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9de4243a_5b4b_4e71_a504_28744cbbbd55.slice/crio-9b9c1ae91cedb13eaf459b32f19300ef53e59ad89eb7aa5a77426383c32a8782 WatchSource:0}: Error finding container 9b9c1ae91cedb13eaf459b32f19300ef53e59ad89eb7aa5a77426383c32a8782: Status 404 returned error can't find the container with id 9b9c1ae91cedb13eaf459b32f19300ef53e59ad89eb7aa5a77426383c32a8782
Sep 29 22:15:01 crc kubenswrapper[4911]: I0929 22:15:01.256942 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5" event={"ID":"9de4243a-5b4b-4e71-a504-28744cbbbd55","Type":"ContainerStarted","Data":"27d77ca7fcf52f19935b3bd4e340c0eb953ddf62bce532c046c60e4a3e2b973f"}
Sep 29 22:15:01 crc kubenswrapper[4911]: I0929 22:15:01.257224 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5" event={"ID":"9de4243a-5b4b-4e71-a504-28744cbbbd55","Type":"ContainerStarted","Data":"9b9c1ae91cedb13eaf459b32f19300ef53e59ad89eb7aa5a77426383c32a8782"}
Sep 29 22:15:01 crc kubenswrapper[4911]: I0929 22:15:01.275449 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5" podStartSLOduration=1.275432921 podStartE2EDuration="1.275432921s" podCreationTimestamp="2025-09-29 22:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:15:01.27154512 +0000 UTC m=+2979.248657801" watchObservedRunningTime="2025-09-29 22:15:01.275432921 +0000 UTC m=+2979.252545602"
Sep 29 22:15:01 crc kubenswrapper[4911]: E0929 22:15:01.853991 4911 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9de4243a_5b4b_4e71_a504_28744cbbbd55.slice/crio-conmon-27d77ca7fcf52f19935b3bd4e340c0eb953ddf62bce532c046c60e4a3e2b973f.scope\": RecentStats: unable to find data in memory cache]"
Sep 29 22:15:02 crc kubenswrapper[4911]: I0929 22:15:02.267436 4911 generic.go:334] "Generic (PLEG): container finished" podID="9de4243a-5b4b-4e71-a504-28744cbbbd55" containerID="27d77ca7fcf52f19935b3bd4e340c0eb953ddf62bce532c046c60e4a3e2b973f" exitCode=0
Sep 29 22:15:02 crc kubenswrapper[4911]: I0929 22:15:02.267488 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5" event={"ID":"9de4243a-5b4b-4e71-a504-28744cbbbd55","Type":"ContainerDied","Data":"27d77ca7fcf52f19935b3bd4e340c0eb953ddf62bce532c046c60e4a3e2b973f"}
Sep 29 22:15:03 crc kubenswrapper[4911]: I0929 22:15:03.652873 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:03 crc kubenswrapper[4911]: I0929 22:15:03.738815 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9de4243a-5b4b-4e71-a504-28744cbbbd55-config-volume\") pod \"9de4243a-5b4b-4e71-a504-28744cbbbd55\" (UID: \"9de4243a-5b4b-4e71-a504-28744cbbbd55\") "
Sep 29 22:15:03 crc kubenswrapper[4911]: I0929 22:15:03.739025 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhnst\" (UniqueName: \"kubernetes.io/projected/9de4243a-5b4b-4e71-a504-28744cbbbd55-kube-api-access-vhnst\") pod \"9de4243a-5b4b-4e71-a504-28744cbbbd55\" (UID: \"9de4243a-5b4b-4e71-a504-28744cbbbd55\") "
Sep 29 22:15:03 crc kubenswrapper[4911]: I0929 22:15:03.739088 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9de4243a-5b4b-4e71-a504-28744cbbbd55-secret-volume\") pod \"9de4243a-5b4b-4e71-a504-28744cbbbd55\" (UID: \"9de4243a-5b4b-4e71-a504-28744cbbbd55\") "
Sep 29 22:15:03 crc kubenswrapper[4911]: I0929 22:15:03.739937 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9de4243a-5b4b-4e71-a504-28744cbbbd55-config-volume" (OuterVolumeSpecName: "config-volume") pod "9de4243a-5b4b-4e71-a504-28744cbbbd55" (UID: "9de4243a-5b4b-4e71-a504-28744cbbbd55"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 22:15:03 crc kubenswrapper[4911]: I0929 22:15:03.747005 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9de4243a-5b4b-4e71-a504-28744cbbbd55-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9de4243a-5b4b-4e71-a504-28744cbbbd55" (UID: "9de4243a-5b4b-4e71-a504-28744cbbbd55"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 22:15:03 crc kubenswrapper[4911]: I0929 22:15:03.747116 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9de4243a-5b4b-4e71-a504-28744cbbbd55-kube-api-access-vhnst" (OuterVolumeSpecName: "kube-api-access-vhnst") pod "9de4243a-5b4b-4e71-a504-28744cbbbd55" (UID: "9de4243a-5b4b-4e71-a504-28744cbbbd55"). InnerVolumeSpecName "kube-api-access-vhnst". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 22:15:03 crc kubenswrapper[4911]: I0929 22:15:03.841687 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9de4243a-5b4b-4e71-a504-28744cbbbd55-config-volume\") on node \"crc\" DevicePath \"\""
Sep 29 22:15:03 crc kubenswrapper[4911]: I0929 22:15:03.841718 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhnst\" (UniqueName: \"kubernetes.io/projected/9de4243a-5b4b-4e71-a504-28744cbbbd55-kube-api-access-vhnst\") on node \"crc\" DevicePath \"\""
Sep 29 22:15:03 crc kubenswrapper[4911]: I0929 22:15:03.841729 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9de4243a-5b4b-4e71-a504-28744cbbbd55-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 29 22:15:04 crc kubenswrapper[4911]: I0929 22:15:04.289601 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5" event={"ID":"9de4243a-5b4b-4e71-a504-28744cbbbd55","Type":"ContainerDied","Data":"9b9c1ae91cedb13eaf459b32f19300ef53e59ad89eb7aa5a77426383c32a8782"}
Sep 29 22:15:04 crc kubenswrapper[4911]: I0929 22:15:04.289908 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b9c1ae91cedb13eaf459b32f19300ef53e59ad89eb7aa5a77426383c32a8782"
Sep 29 22:15:04 crc kubenswrapper[4911]: I0929 22:15:04.289759 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-mtqq5"
Sep 29 22:15:04 crc kubenswrapper[4911]: I0929 22:15:04.366181 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v"]
Sep 29 22:15:04 crc kubenswrapper[4911]: I0929 22:15:04.376846 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319690-42m2v"]
Sep 29 22:15:04 crc kubenswrapper[4911]: I0929 22:15:04.725649 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a3bdae8-c904-4591-bb7d-0e00a24975fb" path="/var/lib/kubelet/pods/9a3bdae8-c904-4591-bb7d-0e00a24975fb/volumes"
Sep 29 22:15:11 crc kubenswrapper[4911]: I0929 22:15:11.702921 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:15:11 crc kubenswrapper[4911]: E0929 22:15:11.703544 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:15:26 crc kubenswrapper[4911]: I0929 22:15:26.701262 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:15:26 crc kubenswrapper[4911]: E0929 22:15:26.701911 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:15:37 crc kubenswrapper[4911]: I0929 22:15:37.701698 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:15:37 crc kubenswrapper[4911]: E0929 22:15:37.702751 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:15:38 crc kubenswrapper[4911]: I0929 22:15:38.771662 4911 scope.go:117] "RemoveContainer" containerID="dd07c683178b49310e59bb68eff005164f906e6bdaa0d135377fa59e6c19f44a"
Sep 29 22:15:52 crc kubenswrapper[4911]: I0929 22:15:52.709202 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:15:52 crc kubenswrapper[4911]: E0929 22:15:52.709882 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:16:04 crc kubenswrapper[4911]: I0929 22:16:04.701515 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:16:04 crc kubenswrapper[4911]: E0929 22:16:04.702163 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c"
Sep 29 22:16:13 crc kubenswrapper[4911]: I0929 22:16:13.043545 4911 generic.go:334] "Generic (PLEG): container finished" podID="59852fea-b786-4bdc-9542-693035a9a063" containerID="e2996291d7122c0af470e4007c40beed48ed2660789cdef200bacb4207106cbb" exitCode=0
Sep 29 22:16:13 crc kubenswrapper[4911]: I0929 22:16:13.043665 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nc626/must-gather-v6s7x" event={"ID":"59852fea-b786-4bdc-9542-693035a9a063","Type":"ContainerDied","Data":"e2996291d7122c0af470e4007c40beed48ed2660789cdef200bacb4207106cbb"}
Sep 29 22:16:13 crc kubenswrapper[4911]: I0929 22:16:13.045289 4911 scope.go:117] "RemoveContainer" containerID="e2996291d7122c0af470e4007c40beed48ed2660789cdef200bacb4207106cbb"
Sep 29 22:16:14 crc kubenswrapper[4911]: I0929 22:16:14.049550 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nc626_must-gather-v6s7x_59852fea-b786-4bdc-9542-693035a9a063/gather/0.log"
Sep 29 22:16:15 crc kubenswrapper[4911]: I0929 22:16:15.701986 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f"
Sep 29 22:16:15 crc kubenswrapper[4911]: E0929 22:16:15.702575 4911 pod_workers.go:1301]
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:16:21 crc kubenswrapper[4911]: I0929 22:16:21.834978 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nc626/must-gather-v6s7x"] Sep 29 22:16:21 crc kubenswrapper[4911]: I0929 22:16:21.835690 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-nc626/must-gather-v6s7x" podUID="59852fea-b786-4bdc-9542-693035a9a063" containerName="copy" containerID="cri-o://3c0385b583e21d41359438d700f89d3311f20c42470ccbdbf89154e491bb12cb" gracePeriod=2 Sep 29 22:16:21 crc kubenswrapper[4911]: I0929 22:16:21.861607 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nc626/must-gather-v6s7x"] Sep 29 22:16:22 crc kubenswrapper[4911]: I0929 22:16:22.139430 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nc626_must-gather-v6s7x_59852fea-b786-4bdc-9542-693035a9a063/copy/0.log" Sep 29 22:16:22 crc kubenswrapper[4911]: I0929 22:16:22.139841 4911 generic.go:334] "Generic (PLEG): container finished" podID="59852fea-b786-4bdc-9542-693035a9a063" containerID="3c0385b583e21d41359438d700f89d3311f20c42470ccbdbf89154e491bb12cb" exitCode=143 Sep 29 22:16:22 crc kubenswrapper[4911]: I0929 22:16:22.827103 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nc626_must-gather-v6s7x_59852fea-b786-4bdc-9542-693035a9a063/copy/0.log" Sep 29 22:16:22 crc kubenswrapper[4911]: I0929 22:16:22.827921 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nc626/must-gather-v6s7x" Sep 29 22:16:23 crc kubenswrapper[4911]: I0929 22:16:23.024737 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/59852fea-b786-4bdc-9542-693035a9a063-must-gather-output\") pod \"59852fea-b786-4bdc-9542-693035a9a063\" (UID: \"59852fea-b786-4bdc-9542-693035a9a063\") " Sep 29 22:16:23 crc kubenswrapper[4911]: I0929 22:16:23.024800 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pdkk\" (UniqueName: \"kubernetes.io/projected/59852fea-b786-4bdc-9542-693035a9a063-kube-api-access-8pdkk\") pod \"59852fea-b786-4bdc-9542-693035a9a063\" (UID: \"59852fea-b786-4bdc-9542-693035a9a063\") " Sep 29 22:16:23 crc kubenswrapper[4911]: I0929 22:16:23.032974 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59852fea-b786-4bdc-9542-693035a9a063-kube-api-access-8pdkk" (OuterVolumeSpecName: "kube-api-access-8pdkk") pod "59852fea-b786-4bdc-9542-693035a9a063" (UID: "59852fea-b786-4bdc-9542-693035a9a063"). InnerVolumeSpecName "kube-api-access-8pdkk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:16:23 crc kubenswrapper[4911]: I0929 22:16:23.127824 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pdkk\" (UniqueName: \"kubernetes.io/projected/59852fea-b786-4bdc-9542-693035a9a063-kube-api-access-8pdkk\") on node \"crc\" DevicePath \"\"" Sep 29 22:16:23 crc kubenswrapper[4911]: I0929 22:16:23.195656 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nc626_must-gather-v6s7x_59852fea-b786-4bdc-9542-693035a9a063/copy/0.log" Sep 29 22:16:23 crc kubenswrapper[4911]: I0929 22:16:23.196811 4911 scope.go:117] "RemoveContainer" containerID="3c0385b583e21d41359438d700f89d3311f20c42470ccbdbf89154e491bb12cb" Sep 29 22:16:23 crc kubenswrapper[4911]: I0929 22:16:23.197024 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nc626/must-gather-v6s7x" Sep 29 22:16:23 crc kubenswrapper[4911]: I0929 22:16:23.261460 4911 scope.go:117] "RemoveContainer" containerID="e2996291d7122c0af470e4007c40beed48ed2660789cdef200bacb4207106cbb" Sep 29 22:16:23 crc kubenswrapper[4911]: I0929 22:16:23.302838 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59852fea-b786-4bdc-9542-693035a9a063-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "59852fea-b786-4bdc-9542-693035a9a063" (UID: "59852fea-b786-4bdc-9542-693035a9a063"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:16:23 crc kubenswrapper[4911]: I0929 22:16:23.331927 4911 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/59852fea-b786-4bdc-9542-693035a9a063-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 29 22:16:24 crc kubenswrapper[4911]: I0929 22:16:24.715070 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59852fea-b786-4bdc-9542-693035a9a063" path="/var/lib/kubelet/pods/59852fea-b786-4bdc-9542-693035a9a063/volumes" Sep 29 22:16:28 crc kubenswrapper[4911]: I0929 22:16:28.700923 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f" Sep 29 22:16:28 crc kubenswrapper[4911]: E0929 22:16:28.701995 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:16:38 crc kubenswrapper[4911]: I0929 22:16:38.860060 4911 scope.go:117] "RemoveContainer" containerID="5fb4458e80d6d3d992520ddb9ae20e416cdc8b3f552ec341393f8c2f8bc63d43" Sep 29 22:16:39 crc kubenswrapper[4911]: I0929 22:16:39.701903 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f" Sep 29 22:16:39 crc kubenswrapper[4911]: E0929 22:16:39.702721 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:16:54 crc kubenswrapper[4911]: I0929 22:16:54.701421 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f" Sep 29 22:16:54 crc kubenswrapper[4911]: E0929 22:16:54.702510 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:17:07 crc kubenswrapper[4911]: I0929 22:17:07.701565 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f" Sep 29 22:17:07 crc kubenswrapper[4911]: E0929 22:17:07.703004 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:17:19 crc kubenswrapper[4911]: I0929 22:17:19.701886 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f" Sep 29 22:17:19 crc kubenswrapper[4911]: E0929 22:17:19.703267 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:17:34 crc kubenswrapper[4911]: I0929 22:17:34.700942 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f" Sep 29 22:17:34 crc kubenswrapper[4911]: E0929 22:17:34.701918 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:17:46 crc kubenswrapper[4911]: I0929 22:17:46.701830 4911 scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f" Sep 29 22:17:46 crc kubenswrapper[4911]: E0929 22:17:46.703289 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w647f_openshift-machine-config-operator(50640abc-40db-4390-82d1-f3cfc76da71c)\"" pod="openshift-machine-config-operator/machine-config-daemon-w647f" podUID="50640abc-40db-4390-82d1-f3cfc76da71c" Sep 29 22:18:00 crc kubenswrapper[4911]: I0929 22:18:00.701033 4911 
scope.go:117] "RemoveContainer" containerID="fed63abb46220bf90b5156f960830a58b0cc2be8812d554b2887537e4211823f" Sep 29 22:18:01 crc kubenswrapper[4911]: I0929 22:18:01.356423 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w647f" event={"ID":"50640abc-40db-4390-82d1-f3cfc76da71c","Type":"ContainerStarted","Data":"d08089ef9f981d6dc7957b01271a0c12b2a2f3b3b18197801deffb2c45bb2040"} var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515066602677024464 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015066602677017401 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015066574204016516 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015066574204015466 5ustar corecore